var/home/core/zuul-output/logs/kubelet.log
Dec 03 07:27:15 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 03 07:27:15 crc restorecon[4582]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:15 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 07:27:16 crc restorecon[4582]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc 
restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 07:27:16 crc 
restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 07:27:16 crc restorecon[4582]: 
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 07:27:16 crc restorecon[4582]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 03 07:27:16 crc kubenswrapper[4612]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.933886 4612 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936685 4612 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936706 4612 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936711 4612 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936716 4612 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936721 4612 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936725 4612 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936730 4612 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936736 4612 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936740 4612 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936745 4612 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936750 4612 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936755 4612 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936760    4612 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936764    4612 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936767    4612 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936771    4612 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936775    4612 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936779    4612 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936783    4612 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936787    4612 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936790    4612 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936794    4612 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936799    4612 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936804    4612 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936809    4612 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936814    4612 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936819    4612 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936823    4612 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936828    4612 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936832    4612 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936836    4612 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936840    4612 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936844    4612 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936848    4612 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936851    4612 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936855    4612 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936858    4612 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936862    4612 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936868    4612 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936871    4612 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936876    4612 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936880    4612 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936883    4612 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936887    4612 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936891    4612 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936894    4612 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936898    4612 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936901    4612 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936905    4612 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936908    4612 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936911    4612 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936915    4612 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936918    4612 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936922    4612 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936925    4612 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936929    4612 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936932    4612 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936935    4612 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936939    4612 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936959    4612 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936963    4612 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936967    4612 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936971    4612 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936975    4612 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936980    4612 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936986    4612 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936990    4612 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.936995    4612 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.937000    4612 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.937006    4612 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 07:27:16 crc kubenswrapper[4612]: W1203 07:27:16.937012    4612 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937285    4612 flags.go:64] FLAG: --address="0.0.0.0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937298    4612 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937305    4612 flags.go:64] FLAG: --anonymous-auth="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937311    4612 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937317    4612 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937322    4612 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937328    4612 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937334    4612 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937338    4612 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937343    4612 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937348    4612 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937352    4612 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937357    4612 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937362    4612 flags.go:64] FLAG: --cgroup-root=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937367    4612 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937372    4612 flags.go:64] FLAG: --client-ca-file=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937377    4612 flags.go:64] FLAG: --cloud-config=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937381    4612 flags.go:64] FLAG: --cloud-provider=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937386    4612 flags.go:64] FLAG: --cluster-dns="[]"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937391    4612 flags.go:64] FLAG: --cluster-domain=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937396    4612 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937400    4612 flags.go:64] FLAG: --config-dir=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937404    4612 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937409    4612 flags.go:64] FLAG: --container-log-max-files="5"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937416    4612 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937421    4612 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937426    4612 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937430    4612 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937435    4612 flags.go:64] FLAG: --contention-profiling="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937439    4612 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937444    4612 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937450    4612 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937454    4612 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937460    4612 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937464    4612 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937469    4612 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937473    4612 flags.go:64] FLAG: --enable-load-reader="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937477    4612 flags.go:64] FLAG: --enable-server="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937482    4612 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937488    4612 flags.go:64] FLAG: --event-burst="100"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937493    4612 flags.go:64] FLAG: --event-qps="50"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937498    4612 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937502    4612 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937507    4612 flags.go:64] FLAG: --eviction-hard=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937513    4612 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937518    4612 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937523    4612 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937527    4612 flags.go:64] FLAG: --eviction-soft=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937531    4612 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937540    4612 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937544    4612 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937548    4612 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937552    4612 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937556    4612 flags.go:64] FLAG: --fail-swap-on="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937560    4612 flags.go:64] FLAG: --feature-gates=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937565    4612 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937569    4612 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937573    4612 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937577    4612 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937581    4612 flags.go:64] FLAG: --healthz-port="10248"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937585    4612 flags.go:64] FLAG: --help="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937589    4612 flags.go:64] FLAG: --hostname-override=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937594    4612 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937599    4612 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937603    4612 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937607    4612 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937611    4612 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937616    4612 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937620    4612 flags.go:64] FLAG: --image-service-endpoint=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937624    4612 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937628    4612 flags.go:64] FLAG: --kube-api-burst="100"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937632    4612 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937637    4612 flags.go:64] FLAG: --kube-api-qps="50"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937641    4612 flags.go:64] FLAG: --kube-reserved=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937645    4612 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937649    4612 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937653    4612 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937657    4612 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937661    4612 flags.go:64] FLAG: --lock-file=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937665    4612 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937669    4612 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937675    4612 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937681    4612 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937686    4612 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937690    4612 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937694    4612 flags.go:64] FLAG: --logging-format="text"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937698    4612 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937702    4612 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937706    4612 flags.go:64] FLAG: --manifest-url=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937710    4612 flags.go:64] FLAG: --manifest-url-header=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937715    4612 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937719    4612 flags.go:64] FLAG: --max-open-files="1000000"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937724    4612 flags.go:64] FLAG: --max-pods="110"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937728    4612 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937732    4612 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937738    4612 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937742    4612 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937747    4612 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937751    4612 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937756    4612 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937765    4612 flags.go:64] FLAG: --node-status-max-images="50"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937769    4612 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937773    4612 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937777    4612 flags.go:64] FLAG: --pod-cidr=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937781    4612 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937788    4612 flags.go:64] FLAG: --pod-manifest-path=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937792    4612 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937796    4612 flags.go:64] FLAG: --pods-per-core="0"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937800    4612 flags.go:64] FLAG: --port="10250"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937804    4612 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937808    4612 flags.go:64] FLAG: --provider-id=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937812    4612 flags.go:64] FLAG: --qos-reserved=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937816    4612 flags.go:64] FLAG: --read-only-port="10255"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937822    4612 flags.go:64] FLAG: --register-node="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937826    4612 flags.go:64] FLAG: --register-schedulable="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937830    4612 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937837    4612 flags.go:64] FLAG: --registry-burst="10"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937841    4612 flags.go:64] FLAG: --registry-qps="5"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937845    4612 flags.go:64] FLAG: --reserved-cpus=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937849    4612 flags.go:64] FLAG: --reserved-memory=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937854    4612 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937858    4612 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937862    4612 flags.go:64] FLAG: --rotate-certificates="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937866    4612 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937871    4612 flags.go:64] FLAG: --runonce="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937875    4612 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937879    4612 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937883    4612 flags.go:64] FLAG: --seccomp-default="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937888    4612 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937892    4612 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937896    4612 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937900    4612 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937905    4612 flags.go:64] FLAG: --storage-driver-password="root"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937909    4612 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937913    4612 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937917    4612 flags.go:64] FLAG: --storage-driver-user="root"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937921    4612 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937926    4612 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937930    4612 flags.go:64] FLAG: --system-cgroups=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937934    4612 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937940    4612 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937962    4612 flags.go:64] FLAG: --tls-cert-file=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937966    4612 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937993    4612 flags.go:64] FLAG: --tls-min-version=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.937998    4612 flags.go:64] FLAG: --tls-private-key-file=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938002    4612 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938009    4612 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938013    4612 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938017    4612 flags.go:64] FLAG: --v="2"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938023    4612 flags.go:64] FLAG: --version="false"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938029    4612 flags.go:64] FLAG: --vmodule=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938033    4612 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938038    4612 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.938629    4612 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.948033    4612 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.948078    4612 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.948652    4612 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.949868    4612 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.950107    4612 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.953748    4612 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.950107 4612 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.953748 4612 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.953874 4612 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.954632 4612 server.go:997] "Starting client certificate rotation"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.954677 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.954937 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-16 15:42:58.976165389 +0000 UTC
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.955044 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.960959 4612 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 07:27:16 crc kubenswrapper[4612]: E1203 07:27:16.962921 4612 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.963254 4612 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.971583 4612 log.go:25] "Validated CRI v1 runtime API"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.987053 4612 log.go:25] "Validated CRI v1 image API"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.991077 4612 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.993736 4612 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-07-21-57-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 03 07:27:16 crc kubenswrapper[4612]: I1203 07:27:16.993787 4612 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.005931 4612 manager.go:217] Machine: {Timestamp:2025-12-03 07:27:17.003719036 +0000 UTC m=+0.177076456 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c9eb3301-3f55-4399-abc8-6d4892c05918 BootID:a3bb5445-5882-4bd7-a327-29d2aa687210 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:33:1b:6b Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:33:1b:6b Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:09:1f:57 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:9b:7c:59 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:fc:25:f4 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:cc:5f:6f Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0a:43:56:80:36:fa Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:e6:2e:90:0d:f6:38 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.006560 4612 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.007016 4612 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.008067 4612 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.008580 4612 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.008748 4612 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
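The nodeConfig={...} payload above is plain JSON, so the eviction thresholds and reservations are easier to read mechanically than by eye. A sketch that decodes them, assuming the {...} blob has been saved to a local node_config.json (a hypothetical file, not one the kubelet writes):

#!/usr/bin/env python3
# Decode SystemReserved and HardEvictionThresholds from the nodeConfig
# JSON logged above. Sketch: expects the {...} payload after "nodeConfig="
# copied into node_config.json (hypothetical filename).
import json

with open("node_config.json", encoding="utf-8") as fh:
    cfg = json.load(fh)

print("SystemReserved:", cfg["SystemReserved"])
for t in cfg["HardEvictionThresholds"]:
    v = t["Value"]
    # A threshold carries either an absolute Quantity or a Percentage.
    limit = v["Quantity"] if v["Quantity"] else f'{v["Percentage"]:.0%}'
    print(f'{t["Signal"]} {t["Operator"]} {limit}')

Against this log it would print, for example, "memory.available LessThan 100Mi" and "nodefs.available LessThan 10%", which is the node's hard-eviction configuration in one screen.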
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.009352 4612 topology_manager.go:138] "Creating topology manager with none policy"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.009485 4612 container_manager_linux.go:303] "Creating device plugin manager"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.009899 4612 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.010122 4612 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.010520 4612 state_mem.go:36] "Initialized new in-memory state store"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.011260 4612 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.012657 4612 kubelet.go:418] "Attempting to sync node with API server"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.013031 4612 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.013245 4612 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.013460 4612 kubelet.go:324] "Adding apiserver pod source"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.013632 4612 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.015363 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.015457 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError"
Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.015486 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.015595 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.016472 4612 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.017188 4612 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
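Both kubelet certificates live under /var/lib/kubelet/pki/ and are rotated by the certificate manager whose expiration and rotation-deadline lines appear above. A quick cross-check of those notAfter dates from outside the kubelet, sketched with the openssl CLI (assumed to be on PATH; the paths are the ones the log just printed):

#!/usr/bin/env python3
# Print the notAfter date of the kubelet client/serving certs.
# Sketch: shells out to the openssl CLI; paths are taken from the log above.
import subprocess

CERTS = [
    "/var/lib/kubelet/pki/kubelet-client-current.pem",
    "/var/lib/kubelet/pki/kubelet-server-current.pem",
]

for path in CERTS:
    out = subprocess.run(
        ["openssl", "x509", "-noout", "-enddate", "-in", path],
        capture_output=True, text=True, check=True,
    ).stdout.strip()  # e.g. "notAfter=Feb 24 05:52:08 2026 GMT"
    print(f"{path}: {out}")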
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.018617 4612 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.019719 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.019922 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020096 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020226 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020346 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020451 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020565 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020680 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020787 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.020891 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.021053 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.021166 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.021584 4612 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.022561 4612 server.go:1280] "Started kubelet"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.022783 4612 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.023313 4612 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.023344 4612 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.024879 4612 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 03 07:27:17 crc systemd[1]: Started Kubernetes Kubelet.
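Everything that fails in this part of the log fails the same way: the API server behind api-int.crc.testing:6443 is not accepting connections yet, so every dial returns connection refused while the kubelet keeps starting and retrying. A pure-stdlib probe of that one endpoint (host and port copied from the errors above; nothing else assumed):

#!/usr/bin/env python3
# Probe the endpoint the kubelet keeps failing to dial.
# Sketch: plain TCP connect to the same host:port as the errors above.
import socket

HOST, PORT = "api-int.crc.testing", 6443

try:
    with socket.create_connection((HOST, PORT), timeout=3):
        print(f"{HOST}:{PORT} accepts TCP connections")
except OSError as exc:  # e.g. ConnectionRefusedError while the API server is down
    print(f"{HOST}:{PORT} unreachable: {exc}")

Run in a loop, this distinguishes "API server still coming up" (refused for a while, then connects) from a routing or DNS problem (timeouts or resolution errors).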
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.027482 4612 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.144:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187da3e5f33899b5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 07:27:17.022513589 +0000 UTC m=+0.195871029,LastTimestamp:2025-12-03 07:27:17.022513589 +0000 UTC m=+0.195871029,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.028867 4612 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.028905 4612 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.029068 4612 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 05:23:41.453045748 +0000 UTC
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.029098 4612 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1077h56m24.423948929s for next certificate rotation
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.029190 4612 server.go:460] "Adding debug handlers to kubelet server"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.029544 4612 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.029664 4612 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.031389 4612 factory.go:55] Registering systemd factory
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.031422 4612 factory.go:221] Registration of the systemd container factory successfully
Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.031686 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.031757 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError"
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.031795 4612 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.031998 4612 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.032122 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="200ms"
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.032268 4612 factory.go:153] Registering CRI-O factory
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.032284 4612 factory.go:221] Registration of the crio container factory successfully
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.032357 4612 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.032384 4612 factory.go:103] Registering Raw factory
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.032412 4612 manager.go:1196] Started watching for new ooms in manager
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.034701 4612 manager.go:319] Starting recovery of all containers
Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.055444 4612 manager.go:324] Recovery completed
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057175 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057192 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057211 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057232 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057251 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057274 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057294 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057313 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057330 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057374 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057391 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" 
seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057406 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057426 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057444 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057462 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057480 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057531 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057548 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057585 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057609 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057628 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057652 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 07:27:17 crc 
kubenswrapper[4612]: I1203 07:27:17.057674 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057691 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057709 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057732 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057750 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057786 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057807 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057825 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057842 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057859 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057877 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 
07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057894 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.057911 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058616 4612 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058651 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058676 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058697 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058725 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058746 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058765 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058781 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058800 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058819 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058847 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058866 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058885 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058905 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058924 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058965 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.058985 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059003 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059019 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059035 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059054 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059073 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059093 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059111 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059128 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059143 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059159 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059177 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059195 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059213 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059229 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059247 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059265 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059284 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059301 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059318 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059336 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059357 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059375 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059390 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059409 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059426 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059444 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059460 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059480 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059499 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059518 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059536 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059553 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059571 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059587 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059604 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059621 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059641 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059661 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059680 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059699 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059714 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059733 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059749 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059768 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059786 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059811 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059831 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059850 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059867 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059885 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059902 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059923 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059939 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059980 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.059997 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060017 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060038 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060054 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060071 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060087 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060105 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060123 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060141 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060158 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060174 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060193 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060211 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060231 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060250 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060270 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060286 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060305 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060324 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060343 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060361 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060379 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060396 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060417 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060433 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060450 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060469 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060485 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060501 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060522 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060542 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060562 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060577 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060594 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060609 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060628 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060649 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060667 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060737 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060756 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060772 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060789 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060806 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060823 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060839 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060855 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060874 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060895 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060914 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060932 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060970 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.060989 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061005 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061023 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061041 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061060 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061077 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061096 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061113 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061130 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061148 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061175 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061194 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061212 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061232 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061247 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061264 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061282 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061300 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061319 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061339 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061377 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061396 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061415 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061432 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061450 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061466 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061484 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061504 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061524 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061542 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061561 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061578 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061598 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061616 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061635 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061656 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061675 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061694 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061711 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061764 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.061785 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.063322 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.063347 4612 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.063364 4612 reconstruct.go:97] "Volume reconstruction finished" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.063385 4612 reconciler.go:26] "Reconciler: start to sync state" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.064307 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.066614 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.066647 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.066663 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.069287 4612 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.069311 4612 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.069339 4612 state_mem.go:36] "Initialized new in-memory state store" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.079235 4612 policy_none.go:49] "None policy: Start" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.082700 4612 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.082736 4612 state_mem.go:35] "Initializing new in-memory state store" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.086863 4612 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.088173 4612 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.088206 4612 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.088231 4612 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.088266 4612 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.089395 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.089480 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.133210 4612 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.143475 4612 manager.go:334] "Starting Device Plugin manager" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.143536 4612 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.143553 4612 server.go:79] "Starting device plugin registration server" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.145828 4612 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.145850 4612 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.146189 4612 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.146256 4612 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.146264 4612 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.153042 4612 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.188371 4612 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.188740 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.189616 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.189682 4612 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.189695 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.189933 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.190130 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.190167 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.190855 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.190896 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.190904 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.191502 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.191522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.191530 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.191640 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192001 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192033 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192354 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192380 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192391 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192473 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192672 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192713 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192947 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.192990 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193002 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193402 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193418 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193427 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193427 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193456 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193469 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193558 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193673 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.193721 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197442 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197500 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197671 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197692 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197793 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.197957 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.198114 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.198367 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.198389 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.198401 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.233518 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="400ms" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.246411 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.247799 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.247905 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.247989 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.248078 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.248770 4612 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.144:6443: connect: connection refused" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266253 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266316 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266350 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266380 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266410 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266437 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266465 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266492 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266522 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266551 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266578 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266605 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266633 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266659 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.266688 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367600 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367669 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367761 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367814 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367859 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367894 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367911 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367981 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368011 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.367826 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368030 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368161 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368191 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368197 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368225 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368095 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368263 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368288 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368309 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368349 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368356 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368395 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368400 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368396 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368418 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368466 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368472 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368488 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368435 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.368626 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.449781 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.451049 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.451089 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.451099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.451124 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.451504 4612 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.144:6443: connect: connection refused" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.517616 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.521132 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.539698 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3acc01e56131b67e63044709619198fd62f5ba862635dd1837877db8c0746cfe WatchSource:0}: Error finding container 3acc01e56131b67e63044709619198fd62f5ba862635dd1837877db8c0746cfe: Status 404 returned error can't find the container with id 3acc01e56131b67e63044709619198fd62f5ba862635dd1837877db8c0746cfe Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.542087 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.549557 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.553360 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.575263 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-f5803b6beadb005bc8757ef3c86ea075e3ba97571baf5d8592c23a7ee98906d0 WatchSource:0}: Error finding container f5803b6beadb005bc8757ef3c86ea075e3ba97571baf5d8592c23a7ee98906d0: Status 404 returned error can't find the container with id f5803b6beadb005bc8757ef3c86ea075e3ba97571baf5d8592c23a7ee98906d0 Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.634683 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="800ms" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.851914 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.853631 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.853727 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.853795 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:17 crc kubenswrapper[4612]: I1203 07:27:17.853878 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 07:27:17.854359 4612 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.144:6443: connect: connection refused" node="crc" Dec 03 07:27:17 crc kubenswrapper[4612]: W1203 07:27:17.966554 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:17 crc kubenswrapper[4612]: E1203 
07:27:17.966632 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.023947 4612 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.095864 4612 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1" exitCode=0 Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.095971 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.096084 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f5803b6beadb005bc8757ef3c86ea075e3ba97571baf5d8592c23a7ee98906d0"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.096170 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.097387 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.097430 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.097444 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.100523 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.100570 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5521d9296fd394e6e6340d18b496ecb5b3be852ddfd0554277046cd4f7cc7523"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.102528 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346" exitCode=0 Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.102614 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.102642 
4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3596939b6234179556189d390b79b5926dcd4c29f6087180f7775ac09d982fad"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.102728 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.103699 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.103728 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.103737 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.105064 4612 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5b2db47f707088e4fe47a06a7a726a02fe00bdc6c12e3b7ed29801bcab661c36" exitCode=0 Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.105119 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5b2db47f707088e4fe47a06a7a726a02fe00bdc6c12e3b7ed29801bcab661c36"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.105140 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3acc01e56131b67e63044709619198fd62f5ba862635dd1837877db8c0746cfe"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.105096 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.105215 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106860 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106881 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106891 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106944 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106985 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.106998 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.108459 4612 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a15b98f84875450972f1e7b27e9db91e5dd56cfe16eee916a061efcb4d395566" exitCode=0 Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.108508 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a15b98f84875450972f1e7b27e9db91e5dd56cfe16eee916a061efcb4d395566"} Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.108538 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"de03a1bc45b910ff910e3273f2480bde10f4726435bd66fb5e28aa17b990095d"} Dec 03 07:27:18 crc kubenswrapper[4612]: W1203 07:27:18.129411 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:18 crc kubenswrapper[4612]: E1203 07:27:18.129500 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError" Dec 03 07:27:18 crc kubenswrapper[4612]: W1203 07:27:18.269901 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:18 crc kubenswrapper[4612]: E1203 07:27:18.270034 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError" Dec 03 07:27:18 crc kubenswrapper[4612]: W1203 07:27:18.402047 4612 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.144:6443: connect: connection refused Dec 03 07:27:18 crc kubenswrapper[4612]: E1203 07:27:18.402125 4612 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.144:6443: connect: connection refused" logger="UnhandledError" Dec 03 07:27:18 crc kubenswrapper[4612]: E1203 07:27:18.436427 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="1.6s" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.655273 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.656477 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.656525 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.656554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:18 crc kubenswrapper[4612]: I1203 07:27:18.656580 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 07:27:18 crc kubenswrapper[4612]: E1203 07:27:18.657058 4612 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.144:6443: connect: connection refused" node="crc"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.068869 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.113804 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.113841 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.113851 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.113860 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.116527 4612 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="da7bb5722f666a39eb03b9453422d480d1b1df8faa10b695ff6ff0a198141411" exitCode=0
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.116597 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"da7bb5722f666a39eb03b9453422d480d1b1df8faa10b695ff6ff0a198141411"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.116746 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.117699 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.117741 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.117752 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.120884 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.120909 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.120920 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.121027 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.121715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.121737 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.121745 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.123575 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.123604 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.123634 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.123647 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7"}
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.123699 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.124633 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.124655 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.124664 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.125137 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.125173 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:19 crc kubenswrapper[4612]: I1203 07:27:19.125185 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.128078 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e7d1e31d477e141292b64e4e58a77c6f4d5e1dfa9096354dd9484e8a1ff5095e"}
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.128228 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.129333 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.129373 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.129384 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.132655 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47"}
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.132743 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.134006 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.134061 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.134075 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135349 4612 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="73fb6e6d593720d2287311d8bbedfd7a3611800e9c010ec8851e602a9f4cf31a" exitCode=0
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135395 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"73fb6e6d593720d2287311d8bbedfd7a3611800e9c010ec8851e602a9f4cf31a"}
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135434 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135577 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135570 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.135603 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.136174 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.136240 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.136264 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137076 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137125 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137209 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137517 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.137584 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.188780 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.257437 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.258618 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.258695 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.258711 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:20 crc kubenswrapper[4612]: I1203 07:27:20.258757 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145048 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145115 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145159 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145039 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"10dee7b0efdbea24cb4baabbd451bdfdb1bf8c29cf732df00b72e036e10af52f"}
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145507 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.145992 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4c0b8f68621bcadcd3eeee61de2c66a73225e5c61109bb170567ecdb0b4126b1"}
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146073 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ef6e5f00e9c3322b7654e79c069953b3b62750ce84b7c29ee1ecb34f85ffd8db"}
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146095 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b8159ea63fc49f09bb2b1c31024154ebf71949ab4d9e7c4cc1111ab5b73a91c0"}
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146111 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"475ad6c8a932a212317e60af8089fe7baef027532302a658556f350a69394657"}
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146171 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146472 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146525 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146549 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146604 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146641 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146661 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.146472 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.147232 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.147261 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.151421 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.151473 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.151487 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.161795 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.845592 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 03 07:27:21 crc kubenswrapper[4612]: I1203 07:27:21.959390 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.041440 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.147700 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.147970 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.148493 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.148522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.148533 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.148630 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.149537 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.149580 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.149591 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.150113 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.150220 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.150240 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:22 crc kubenswrapper[4612]: I1203 07:27:22.897238 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.151855 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.151895 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.151926 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153762 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153792 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153859 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.153963 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.154011 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.154057 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:23 crc kubenswrapper[4612]: I1203 07:27:23.154082 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.162402 4612 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.162493 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.911205 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.911389 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.912746 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.912778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:24 crc kubenswrapper[4612]: I1203 07:27:24.912786 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:25 crc kubenswrapper[4612]: I1203 07:27:25.509567 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 07:27:25 crc kubenswrapper[4612]: I1203 07:27:25.509818 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:25 crc kubenswrapper[4612]: I1203 07:27:25.510973 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:25 crc kubenswrapper[4612]: I1203 07:27:25.511008 4612 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:25 crc kubenswrapper[4612]: I1203 07:27:25.511019 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.567673 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.569142 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.571034 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.571076 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.571089 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:26 crc kubenswrapper[4612]: I1203 07:27:26.572873 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:27 crc kubenswrapper[4612]: E1203 07:27:27.153150 4612 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 07:27:27 crc kubenswrapper[4612]: I1203 07:27:27.162029 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:27 crc kubenswrapper[4612]: I1203 07:27:27.163208 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:27 crc kubenswrapper[4612]: I1203 07:27:27.163255 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:27 crc kubenswrapper[4612]: I1203 07:27:27.163268 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:27 crc kubenswrapper[4612]: I1203 07:27:27.170088 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:28 crc kubenswrapper[4612]: I1203 07:27:28.163833 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:28 crc kubenswrapper[4612]: I1203 07:27:28.165013 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:28 crc kubenswrapper[4612]: I1203 07:27:28.165043 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:28 crc kubenswrapper[4612]: I1203 07:27:28.165050 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:29 crc kubenswrapper[4612]: I1203 07:27:29.024867 4612 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 07:27:29 crc kubenswrapper[4612]: E1203 07:27:29.071204 4612 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a 
signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 07:27:29 crc kubenswrapper[4612]: I1203 07:27:29.174440 4612 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 07:27:29 crc kubenswrapper[4612]: I1203 07:27:29.174490 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 07:27:29 crc kubenswrapper[4612]: I1203 07:27:29.178430 4612 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 07:27:29 crc kubenswrapper[4612]: I1203 07:27:29.178478 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.876006 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.876269 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.877774 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.877839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.877862 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.899616 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.969769 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.970098 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.970639 4612 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 07:27:31 crc 
kubenswrapper[4612]: I1203 07:27:31.970724 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.972188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.972260 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.972288 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:31 crc kubenswrapper[4612]: I1203 07:27:31.976782 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.041849 4612 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.041937 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.173351 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.173840 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174228 4612 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174325 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174638 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174681 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174698 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174893 4612 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.174995 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:32 crc kubenswrapper[4612]: I1203 07:27:32.175076 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:33 crc kubenswrapper[4612]: I1203 07:27:33.134276 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 07:27:33 crc kubenswrapper[4612]: I1203 07:27:33.148133 4612 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 03 07:27:34 crc kubenswrapper[4612]: E1203 07:27:34.156126 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.158136 4612 trace.go:236] Trace[1174701081]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 07:27:21.298) (total time: 12859ms): Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[1174701081]: ---"Objects listed" error: 12859ms (07:27:34.158) Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[1174701081]: [12.859575122s] [12.859575122s] END Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.158342 4612 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.158694 4612 trace.go:236] Trace[273775846]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 07:27:21.325) (total time: 12833ms): Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[273775846]: ---"Objects listed" error: 12833ms (07:27:34.158) Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[273775846]: [12.833110669s] [12.833110669s] END Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.158725 4612 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.159814 4612 trace.go:236] Trace[1177726293]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 07:27:20.779) (total time: 13380ms): Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[1177726293]: ---"Objects listed" error: 13380ms (07:27:34.159) Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[1177726293]: [13.38039235s] [13.38039235s] END Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.159865 4612 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.160431 4612 trace.go:236] Trace[173933914]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 07:27:20.919) (total time: 13240ms): Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[173933914]: ---"Objects listed" error: 13240ms (07:27:34.160) Dec 03 07:27:34 crc kubenswrapper[4612]: Trace[173933914]: [13.240399594s] [13.240399594s] END Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.160472 4612 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.163432 4612 patch_prober.go:28] interesting 
pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.163483 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 07:27:34 crc kubenswrapper[4612]: E1203 07:27:34.163640 4612 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.163913 4612 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.671062 4612 csr.go:261] certificate signing request csr-7jlnd is approved, waiting to be issued Dec 03 07:27:34 crc kubenswrapper[4612]: I1203 07:27:34.722556 4612 csr.go:257] certificate signing request csr-7jlnd is issued Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.026516 4612 apiserver.go:52] "Watching apiserver" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.030282 4612 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.030520 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.030864 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.030936 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.030971 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.031009 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.031149 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.031199 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.031239 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.031197 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.031523 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.032780 4612 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.035429 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.035627 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.035833 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.036072 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.037082 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.037272 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.038618 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.043488 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.046411 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069313 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069370 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069401 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069424 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069486 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069509 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069532 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069557 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069584 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069605 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069630 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069871 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069894 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069919 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.069989 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070015 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070040 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070088 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070112 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070137 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070163 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070190 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070215 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070238 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070262 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070289 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070314 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070340 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070367 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070393 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod 
\"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070415 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070439 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070464 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070487 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070512 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070557 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070580 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070604 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070626 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070651 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070675 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070698 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070721 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070742 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070784 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070810 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070832 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070855 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070878 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070901 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070926 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070967 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.070989 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071011 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071036 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071059 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071080 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071103 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071127 4612 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071150 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071171 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071192 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071216 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071238 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071261 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071284 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071308 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071337 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 
07:27:35.071363 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071388 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071410 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071434 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071459 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071483 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071509 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071534 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071558 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071582 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071605 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071628 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071651 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071674 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071697 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071721 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071744 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071766 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071791 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071812 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071835 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071859 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071881 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071903 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071926 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071974 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.071999 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072023 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072046 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072071 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072098 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072127 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072152 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072176 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072201 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072226 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072250 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072275 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072300 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072323 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072346 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072372 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072394 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072518 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072547 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072570 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072594 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072620 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072643 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072667 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072690 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072714 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072738 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072761 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072784 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072808 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072834 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072858 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072883 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072908 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072932 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.072977 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073001 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073027 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073051 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073075 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073098 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073126 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073153 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 
07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073179 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073203 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073226 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073249 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073274 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073299 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073321 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073343 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073367 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073390 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073417 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073440 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073464 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073488 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073516 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073540 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073563 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073588 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073611 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073634 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073658 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073685 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073709 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073734 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073757 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073783 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073832 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073856 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073881 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073907 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073932 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.073974 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074002 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074029 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074055 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074081 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074106 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074131 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074155 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074180 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074206 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074230 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074256 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074282 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074306 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074330 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074355 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074381 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074406 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074431 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074458 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074482 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074511 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074561 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074588 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074614 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074666 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074700 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc 
kubenswrapper[4612]: I1203 07:27:35.074733 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074759 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074784 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074815 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074842 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074871 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074904 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074932 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.074979 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.075009 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.075035 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.075062 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.075999 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076056 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076263 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076275 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076288 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076548 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.076596 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077017 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077170 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077257 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077352 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077455 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077530 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077655 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.077859 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.078857 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.079259 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.079475 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.080187 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.080285 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.080471 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.080529 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.080833 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081072 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081100 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081217 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081300 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081349 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.081483 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082025 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082034 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082254 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082603 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082613 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.082923 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083007 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083018 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083260 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083344 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083520 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.083882 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.084189 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.084390 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.084780 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.084972 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.085333 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.085451 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.085689 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.085718 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.092124 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.093055 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.096361 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.096607 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.096544 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.096690 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:27:35.596670173 +0000 UTC m=+18.770027573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.100199 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.100303 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.100533 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.100934 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.096962 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.097386 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.098898 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.099894 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.096840 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.107421 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.107588 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.108162 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.109175 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.110456 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.110482 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.110680 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.110643 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.110988 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.111276 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.111686 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.112182 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.112972 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.114828 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.115081 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.115473 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.115326 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.115893 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.117054 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.117408 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.117593 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.117655 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.117756 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.118205 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.118289 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:35.618269878 +0000 UTC m=+18.791627278 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.118392 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.118198 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.118615 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.119068 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.119340 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.119688 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120403 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120463 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120480 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120736 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120752 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.122070 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126299 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.122160 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.123260 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.123659 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.123696 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.124987 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125178 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.124847 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125299 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125554 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125626 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125680 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125714 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125813 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.125886 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126142 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126345 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126510 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126551 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126585 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126647 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.126801 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.127372 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.127433 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:35.627416608 +0000 UTC m=+18.800774008 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.127884 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128072 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128095 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128120 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128077 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128404 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.128746 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.129286 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.129620 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.129664 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130250 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130295 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130445 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130467 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130511 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130648 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130667 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130821 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130831 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.130959 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131001 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131109 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131147 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131174 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131168 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131356 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131395 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131517 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131563 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.131639 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132046 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132624 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132665 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132879 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). 
InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132927 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.132993 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.134052 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.134323 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.121833 4612 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.135195 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.135275 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.135432 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.135522 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.137312 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.137554 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.137583 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.137704 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.138279 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.138329 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.138417 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.138756 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.139695 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.139779 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.140198 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.140233 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.140551 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.140983 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141069 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.120986 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141181 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141346 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141399 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141546 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.141581 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.143146 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.143211 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.146647 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.152979 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.153023 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.153383 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.154357 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.154603 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.157232 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.160094 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.161014 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.163298 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.163547 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.165789 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.166012 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.170854 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.172198 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.172424 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.172446 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.172457 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, 
object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.172509 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:35.672493292 +0000 UTC m=+18.845850692 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.173411 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.175094 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.175717 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177021 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177089 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177139 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177150 4612 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177160 4612 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177170 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" 
DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177179 4612 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177187 4612 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177195 4612 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177203 4612 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177211 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177221 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177231 4612 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177254 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177265 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177273 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177282 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177290 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177298 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc 
kubenswrapper[4612]: I1203 07:27:35.177306 4612 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177315 4612 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177323 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177333 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177344 4612 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177355 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177366 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177373 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177382 4612 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177390 4612 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177398 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177406 4612 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177414 4612 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc 
kubenswrapper[4612]: I1203 07:27:35.177422 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177430 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177438 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177463 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177473 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177484 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177493 4612 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177502 4612 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177513 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177523 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177534 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177544 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177553 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177563 4612 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177572 4612 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177581 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177591 4612 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177599 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177619 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177628 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177636 4612 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177645 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177656 4612 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177666 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177679 4612 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177687 4612 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc 
kubenswrapper[4612]: I1203 07:27:35.177697 4612 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177705 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177713 4612 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177722 4612 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177730 4612 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177740 4612 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177748 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177756 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177765 4612 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177773 4612 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177781 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177789 4612 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177798 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177807 
4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177816 4612 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177825 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177833 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177841 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177849 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177857 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177866 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177873 4612 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177881 4612 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177890 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177899 4612 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177908 4612 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177916 4612 
reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177925 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177934 4612 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177954 4612 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177962 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177970 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177977 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177985 4612 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.177993 4612 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178000 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178009 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178017 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178025 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178035 4612 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178044 4612 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178053 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178062 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178070 4612 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178079 4612 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178103 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178111 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178119 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178127 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178135 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178143 4612 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178151 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178160 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" 
DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178168 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178176 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178183 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178191 4612 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178200 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178208 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178216 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178224 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178234 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178243 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178252 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178264 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178271 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178280 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178287 4612 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178295 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178304 4612 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178311 4612 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178322 4612 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178332 4612 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178342 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178351 4612 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178359 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178366 4612 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178374 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178382 4612 reconciler_common.go:293] "Volume detached for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178391 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178400 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178410 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178417 4612 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178426 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178434 4612 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178442 4612 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178450 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178458 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178466 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178474 4612 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178482 4612 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178489 4612 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178497 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178507 4612 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178514 4612 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178522 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178532 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178541 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178549 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178557 4612 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178564 4612 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178572 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178580 4612 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178589 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178599 4612 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178606 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178614 4612 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178622 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178630 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178638 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178646 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178654 4612 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178661 4612 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178669 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178677 4612 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178685 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178693 4612 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178701 4612 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178709 4612 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178716 4612 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178724 4612 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178731 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178739 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178748 4612 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178762 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178772 4612 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178780 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178787 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178796 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178803 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178811 4612 
reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.178819 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.179131 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.179142 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.179270 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.179298 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.181696 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.182661 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.182879 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47" exitCode=255 Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.184141 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.185149 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.191284 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.194578 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.195709 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.197584 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.198372 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.203152 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.204345 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.208292 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.209016 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.210505 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.211067 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.211097 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.211112 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.211314 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:35.711142102 +0000 UTC m=+18.884499492 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.212581 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.213401 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.214371 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.215806 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.217880 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.219556 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.220091 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.222431 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.223194 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.223787 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.224706 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.224728 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.224738 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.226265 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.227033 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.228081 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.228632 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.229688 4612 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.229854 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.233091 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.234168 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.234770 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.236706 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.238492 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.238806 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.240087 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.241134 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.242819 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.243524 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.244711 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.245930 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.246706 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.247338 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.248361 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.249360 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.250280 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.250884 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.251799 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.252356 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.253398 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.254113 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.254645 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.255609 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47"} Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.261193 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-5g4hj"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.261742 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-7xg44"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.262925 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.262931 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.263353 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.266988 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-d8td2"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.267470 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-p52kb"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.267778 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.268375 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.272717 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.272730 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.272920 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.273134 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.273585 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.273803 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274403 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274564 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274607 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274778 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274790 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.274933 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.275034 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.275066 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.275201 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 
07:27:35.279583 4612 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.279611 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.279620 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.279630 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.285628 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.297262 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.306997 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.319038 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.328297 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.339301 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.339709 4612 scope.go:117] "RemoveContainer" containerID="f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.343669 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.344068 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.354437 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 07:27:35 crc kubenswrapper[4612]: W1203 07:27:35.364367 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-5db4540daa0a1089d07bd017d09349aafd55cfe488b05d35576c9ed11b5944d1 WatchSource:0}: Error finding container 5db4540daa0a1089d07bd017d09349aafd55cfe488b05d35576c9ed11b5944d1: Status 404 returned error can't find the container with id 5db4540daa0a1089d07bd017d09349aafd55cfe488b05d35576c9ed11b5944d1 Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.366969 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.367270 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380034 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-system-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380075 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bf87dca8-ceaa-424a-8074-7a63c648b84b-proxy-tls\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380096 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380113 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx5w9\" (UniqueName: \"kubernetes.io/projected/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-kube-api-access-nx5w9\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380132 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-os-release\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380146 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cni-binary-copy\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380162 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380175 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-etc-kubernetes\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380190 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: 
\"kubernetes.io/host-path/1007d628-c3fc-4ecf-a0af-86c406fd2290-hosts-file\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380204 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w26dm\" (UniqueName: \"kubernetes.io/projected/bf87dca8-ceaa-424a-8074-7a63c648b84b-kube-api-access-w26dm\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380219 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cnibin\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380242 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-netns\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380255 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-hostroot\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380273 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-system-cni-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380292 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-kubelet\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380306 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bf87dca8-ceaa-424a-8074-7a63c648b84b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380325 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-k8s-cni-cncf-io\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380340 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-multus\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380371 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380386 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-socket-dir-parent\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380400 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-os-release\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380414 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-bin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380429 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cnibin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380443 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-daemon-config\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380457 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-conf-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380472 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4prvh\" (UniqueName: \"kubernetes.io/projected/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-kube-api-access-4prvh\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380492 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-binary-copy\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380511 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bf87dca8-ceaa-424a-8074-7a63c648b84b-rootfs\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380529 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-multus-certs\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.380642 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvzld\" (UniqueName: \"kubernetes.io/projected/1007d628-c3fc-4ecf-a0af-86c406fd2290-kube-api-access-lvzld\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.398674 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: W1203 07:27:35.402609 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-95f22b83d2dd10f156eb6403a0c7d9faeb65c1124418965c475c4a5f658dc1dc WatchSource:0}: Error finding container 95f22b83d2dd10f156eb6403a0c7d9faeb65c1124418965c475c4a5f658dc1dc: Status 404 returned error can't find the container with id 95f22b83d2dd10f156eb6403a0c7d9faeb65c1124418965c475c4a5f658dc1dc Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.425479 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.449882 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.478980 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481559 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-multus-certs\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481638 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvzld\" (UniqueName: \"kubernetes.io/projected/1007d628-c3fc-4ecf-a0af-86c406fd2290-kube-api-access-lvzld\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481673 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-system-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481701 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bf87dca8-ceaa-424a-8074-7a63c648b84b-proxy-tls\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481736 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481770 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx5w9\" (UniqueName: \"kubernetes.io/projected/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-kube-api-access-nx5w9\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481797 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-os-release\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481831 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cni-binary-copy\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481865 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481892 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-etc-kubernetes\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.481938 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1007d628-c3fc-4ecf-a0af-86c406fd2290-hosts-file\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482258 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w26dm\" (UniqueName: \"kubernetes.io/projected/bf87dca8-ceaa-424a-8074-7a63c648b84b-kube-api-access-w26dm\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482293 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cnibin\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482322 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-netns\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482353 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-hostroot\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482383 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-system-cni-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482414 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-kubelet\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482442 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bf87dca8-ceaa-424a-8074-7a63c648b84b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482472 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-k8s-cni-cncf-io\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482500 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-multus\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482557 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482573 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-system-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482607 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-multus-certs\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482674 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-socket-dir-parent\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482586 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-socket-dir-parent\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482733 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-os-release\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482742 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cnibin\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482760 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-bin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482782 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-netns\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482789 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cnibin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482810 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-daemon-config\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " 
pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482829 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-conf-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482849 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4prvh\" (UniqueName: \"kubernetes.io/projected/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-kube-api-access-4prvh\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482924 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-binary-copy\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482932 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-etc-kubernetes\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483026 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bf87dca8-ceaa-424a-8074-7a63c648b84b-rootfs\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482971 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bf87dca8-ceaa-424a-8074-7a63c648b84b-rootfs\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483065 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-os-release\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483078 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-kubelet\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483106 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-bin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483081 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1007d628-c3fc-4ecf-a0af-86c406fd2290-hosts-file\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483135 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-conf-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483146 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cnibin\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482810 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-hostroot\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483160 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.482830 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-system-cni-dir\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483333 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-run-k8s-cni-cncf-io\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483374 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-os-release\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483565 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-cni-dir\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483601 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-host-var-lib-cni-multus\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") 
" pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.483636 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-cni-binary-copy\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.484061 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.484320 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bf87dca8-ceaa-424a-8074-7a63c648b84b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.484321 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-cni-binary-copy\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.484349 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-multus-daemon-config\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.485355 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bf87dca8-ceaa-424a-8074-7a63c648b84b-proxy-tls\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.499307 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.507805 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvzld\" (UniqueName: \"kubernetes.io/projected/1007d628-c3fc-4ecf-a0af-86c406fd2290-kube-api-access-lvzld\") pod \"node-resolver-5g4hj\" (UID: \"1007d628-c3fc-4ecf-a0af-86c406fd2290\") " pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.523318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4prvh\" (UniqueName: \"kubernetes.io/projected/b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d-kube-api-access-4prvh\") pod \"multus-p52kb\" (UID: \"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\") " pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.523579 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx5w9\" (UniqueName: \"kubernetes.io/projected/7b8cd268-ef6d-4c13-a726-f7e6a9beec58-kube-api-access-nx5w9\") pod \"multus-additional-cni-plugins-7xg44\" (UID: \"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\") " pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.524041 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w26dm\" (UniqueName: \"kubernetes.io/projected/bf87dca8-ceaa-424a-8074-7a63c648b84b-kube-api-access-w26dm\") pod \"machine-config-daemon-d8td2\" (UID: \"bf87dca8-ceaa-424a-8074-7a63c648b84b\") " pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.526072 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.553243 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.585036 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7xg44" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.594673 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-5g4hj" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.603218 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-p52kb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.608925 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.610787 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.644070 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9krtb"] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.644894 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.654288 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.659170 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.659673 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.663399 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.664149 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.664297 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.664866 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700626 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700768 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd\") pod 
\"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700791 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700819 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700840 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700861 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700878 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700899 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmrn\" (UniqueName: \"kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700925 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700963 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.700983 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701002 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701017 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701035 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701051 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701086 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701172 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:36.701153153 +0000 UTC m=+19.874510553 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701167 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701197 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701217 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701258 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:27:36.701249536 +0000 UTC m=+19.874606936 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701283 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701309 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701328 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701360 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701378 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701396 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.701412 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701530 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701533 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701550 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701561 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701570 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:36.701557405 +0000 UTC m=+19.874914805 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.701592 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:36.701579335 +0000 UTC m=+19.874936735 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.724999 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-03 07:22:34 +0000 UTC, rotation deadline is 2026-09-18 08:52:29.807768123 +0000 UTC Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.725056 4612 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6937h24m54.082714079s for next certificate rotation Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.731594 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.803783 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804367 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804392 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804411 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804427 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmrn\" (UniqueName: \"kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804443 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804460 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804475 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804489 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804519 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804509 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804534 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804618 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804650 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804676 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804691 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804729 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804746 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804762 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804815 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804831 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804849 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804880 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.804967 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805004 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805011 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805026 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805052 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805066 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805089 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch\") pod \"ovnkube-node-9krtb\" 
(UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805125 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805149 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805844 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805886 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805925 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805967 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.805993 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.806014 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.806017 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.806074 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.806164 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.806194 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.806204 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: E1203 07:27:35.806247 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:36.806231265 +0000 UTC m=+19.979588765 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.814869 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.846060 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmrn\" (UniqueName: \"kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn\") pod \"ovnkube-node-9krtb\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.855247 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.885601 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.905462 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.923757 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.942030 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.964859 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:35 crc kubenswrapper[4612]: I1203 07:27:35.986624 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.005792 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.011729 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-sync
er\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: W1203 07:27:36.024794 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64b21a08_7c39_4c31_a34d_88e74edf88c6.slice/crio-09918bbcce6dbd6b285fac5495c8fbfed1580327cc63d01e4880a3b8239f2fb5 WatchSource:0}: Error finding container 09918bbcce6dbd6b285fac5495c8fbfed1580327cc63d01e4880a3b8239f2fb5: Status 404 returned error can't find the container with id 09918bbcce6dbd6b285fac5495c8fbfed1580327cc63d01e4880a3b8239f2fb5 Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.040536 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.186974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5db4540daa0a1089d07bd017d09349aafd55cfe488b05d35576c9ed11b5944d1"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.194030 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.199986 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.200417 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.203313 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" 
event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerStarted","Data":"6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.203360 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerStarted","Data":"1ecc012f7bb48b03517fd5dbe4e7d3a0a045e14517a7bb476b091180218eb22b"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.204804 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5g4hj" event={"ID":"1007d628-c3fc-4ecf-a0af-86c406fd2290","Type":"ContainerStarted","Data":"a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.204868 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5g4hj" event={"ID":"1007d628-c3fc-4ecf-a0af-86c406fd2290","Type":"ContainerStarted","Data":"bb798a8bdd96112f01e22c2c4d0458e4c3fd831f97316ac3e24bc81e5a601bf0"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.206660 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.206703 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.206713 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"95f22b83d2dd10f156eb6403a0c7d9faeb65c1124418965c475c4a5f658dc1dc"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.209613 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace" exitCode=0 Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.209685 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.209777 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"09918bbcce6dbd6b285fac5495c8fbfed1580327cc63d01e4880a3b8239f2fb5"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.212502 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.212542 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ecdf706262e68ee4b168acf844f4daf3948a4e52baf50f7958595e256ea0936f"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.215077 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerStarted","Data":"b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.215155 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerStarted","Data":"9547aa5b45fb502fafcd9a9a727df2e90e78e9fe51795e188d3dfd3d202ac422"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.224337 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.224412 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"58bac0881bf19d04af14d52b4eefb0f7a5cbe9a1044cca079811c1680ade5726"} Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.240334 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.263510 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.284515 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.310330 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.342007 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.364638 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.400276 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.416194 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.431363 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.445776 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.457782 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.472089 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.488856 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.516064 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.534936 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.548472 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.567308 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.588193 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.666526 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.698188 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.719540 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.719684 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.719738 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.719756 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.719832 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.719881 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:38.719867467 +0000 UTC m=+21.893224867 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720284 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:27:38.720275599 +0000 UTC m=+21.893632999 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720348 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720371 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:38.720365491 +0000 UTC m=+21.893722891 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720430 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720446 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720457 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.720478 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:38.720472144 +0000 UTC m=+21.893829544 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.725210 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.746667 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.763413 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.794604 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:36Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.821229 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.821428 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.821456 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.821470 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 07:27:36 crc kubenswrapper[4612]: E1203 07:27:36.821559 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:38.821536272 +0000 UTC m=+21.994893672 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 07:27:36 crc kubenswrapper[4612]: I1203 07:27:36.955854 4612 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.089455 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.089566 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.089600 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.089721 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.089773 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.089866 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.093935 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.109272 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.132398 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.152630 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.185058 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.203639 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.215351 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.228253 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.230074 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.229481 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.229931 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16" exitCode=0
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.232760 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.232787 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.232797 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.248259 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h"
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.261374 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.280233 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.307174 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.365237 4612 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.370145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.370180 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.370188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.370272 4612 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.382532 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.391758 4612 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.391938 4612 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.393382 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc 
kubenswrapper[4612]: I1203 07:27:37.393413 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.393422 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.393435 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.393444 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.409294 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.460190 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.464534 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.464561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.464569 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.464582 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.464592 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.470037 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.508028 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.516594 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.518828 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.518851 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.518861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.518882 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.518892 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.540298 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.546723 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c
9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.563300 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.563351 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.563363 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.563383 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.563396 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.575602 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc 
kubenswrapper[4612]: E1203 07:27:37.592027 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d
34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.607169 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.607205 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:37 
crc kubenswrapper[4612]: I1203 07:27:37.607214 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.607229 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.607239 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.607894 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.626442 4612 
kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: E1203 07:27:37.626551 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.628012 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.628058 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.628070 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.628083 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.628092 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.630414 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.658380 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.686853 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.705247 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.723295 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.731033 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.731057 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.731065 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.731077 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.731088 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.741080 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:37Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.833628 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.833665 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.833674 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.833689 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.833698 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.936651 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.936682 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.936690 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.936704 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:37 crc kubenswrapper[4612]: I1203 07:27:37.936713 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:37Z","lastTransitionTime":"2025-12-03T07:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.038601 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.038633 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.038643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.038657 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.038667 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.140510 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.140554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.140566 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.140581 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.140594 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.240670 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.241004 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.241066 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242221 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242234 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242249 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242260 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.242462 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.244760 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154" exitCode=0
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.244851 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154"}
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.261846 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.284430 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.297918 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.319276 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.329808 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.347323 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.348489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.348540 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.348551 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.348566 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.348577 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.362736 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.376153 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.391372 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.408279 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.421870 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.437340 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.450540 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.450569 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.450581 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.450598 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.450608 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.451133 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.466042 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.487082 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.501429 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.513770 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.526700 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.539291 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.552268 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.552297 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.552307 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.552323 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.552334 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.554155 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.567644 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.579489 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.591189 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.605009 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:38Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.740984 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.741122 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.741149 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.741171 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741273 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741320 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:42.741307099 +0000 UTC m=+25.914664499 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741641 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:27:42.741631118 +0000 UTC m=+25.914988518 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741732 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741757 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741771 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741800 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:42.741791933 +0000 UTC m=+25.915149333 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741837 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.741862 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:42.741854504 +0000 UTC m=+25.915211904 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.832735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.832795 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.832805 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.832823 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.832833 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.842420 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.842730 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.842792 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.842809 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:38 crc kubenswrapper[4612]: E1203 07:27:38.842921 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:42.842889471 +0000 UTC m=+26.016247061 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.935887 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.935926 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.935935 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.935969 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:38 crc kubenswrapper[4612]: I1203 07:27:38.935980 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:38Z","lastTransitionTime":"2025-12-03T07:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.039588 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.039810 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.039819 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.039836 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.039845 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.088687 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.088752 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.088687 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:39 crc kubenswrapper[4612]: E1203 07:27:39.088824 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:39 crc kubenswrapper[4612]: E1203 07:27:39.088973 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:39 crc kubenswrapper[4612]: E1203 07:27:39.089033 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.141861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.141916 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.141933 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.141981 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.141999 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.244823 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.244861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.244874 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.244890 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.244902 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.248882 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22" exitCode=0 Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.249331 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.264007 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.282450 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.316218 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.331863 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.347136 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.350671 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.350719 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.350731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.350753 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.350766 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.356961 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.367765 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.389687 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.401480 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.416022 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.427210 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.442923 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.454389 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.454432 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.454442 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.454461 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.454472 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.557087 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.557457 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.557467 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.557484 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.557499 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.660282 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.660336 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.660347 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.660364 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.660375 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.715686 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-b2zhj"] Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.716155 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.718680 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.718899 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.718975 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.719294 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.731523 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.746342 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.749959 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-serviceca\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.750006 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zmcg\" (UniqueName: \"kubernetes.io/projected/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-kube-api-access-7zmcg\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.750042 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-host\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.760864 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.762400 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.762443 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.762455 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.762476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.762488 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.776409 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.794967 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z 
is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.811473 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.828134 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.845224 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.851288 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-serviceca\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.851552 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zmcg\" (UniqueName: \"kubernetes.io/projected/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-kube-api-access-7zmcg\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.851714 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-host\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.851837 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-host\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.852443 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-serviceca\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864837 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864870 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864880 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864894 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864903 4612 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.864965 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.872227 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zmcg\" (UniqueName: \"kubernetes.io/projected/02de6f0d-8266-45f0-87f3-8bc2da2e61d2-kube-api-access-7zmcg\") pod \"node-ca-b2zhj\" (UID: \"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\") " pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.878850 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.891895 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36
dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.905855 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.920884 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:39Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.967657 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.967687 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.967694 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.967708 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:39 crc kubenswrapper[4612]: I1203 07:27:39.967717 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:39Z","lastTransitionTime":"2025-12-03T07:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.029724 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-b2zhj" Dec 03 07:27:40 crc kubenswrapper[4612]: W1203 07:27:40.054876 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02de6f0d_8266_45f0_87f3_8bc2da2e61d2.slice/crio-11cc6461d3e7ce8f5ae896b59ea61764a43ab10913cf0a4f79526698108eab47 WatchSource:0}: Error finding container 11cc6461d3e7ce8f5ae896b59ea61764a43ab10913cf0a4f79526698108eab47: Status 404 returned error can't find the container with id 11cc6461d3e7ce8f5ae896b59ea61764a43ab10913cf0a4f79526698108eab47 Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.074697 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.074745 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.074756 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.074773 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.074790 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.178157 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.178187 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.178215 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.178228 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.178237 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.257759 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.263059 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c" exitCode=0 Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.263108 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.266648 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-b2zhj" event={"ID":"02de6f0d-8266-45f0-87f3-8bc2da2e61d2","Type":"ContainerStarted","Data":"11cc6461d3e7ce8f5ae896b59ea61764a43ab10913cf0a4f79526698108eab47"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.280869 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.280916 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.280929 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.280963 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.280978 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.284891 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.300926 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.319478 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.334244 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.356362 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.371224 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.382565 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.382600 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.382609 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.382622 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.382633 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.384432 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.401346 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.415422 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.428665 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\"
 for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.443629 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.461301 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.477177 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:40Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.485143 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.485197 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.485211 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.485236 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.485248 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.588579 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.588634 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.588645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.588661 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.588671 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.692156 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.692214 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.692227 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.692248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.692261 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.795242 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.795280 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.795292 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.795308 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.795318 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.898017 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.898063 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.898072 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.898084 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:40 crc kubenswrapper[4612]: I1203 07:27:40.898095 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:40Z","lastTransitionTime":"2025-12-03T07:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.000825 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.000866 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.000878 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.000895 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.000909 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.089445 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.089475 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.089514 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:41 crc kubenswrapper[4612]: E1203 07:27:41.089611 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:41 crc kubenswrapper[4612]: E1203 07:27:41.089725 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:41 crc kubenswrapper[4612]: E1203 07:27:41.089795 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.104036 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.104080 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.104097 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.104114 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.104128 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.165889 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.170794 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.176412 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.182357 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.198352 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.208645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.208709 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.208727 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.208753 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.208768 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.238442 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640c
fb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.271662 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-b2zhj" event={"ID":"02de6f0d-8266-45f0-87f3-8bc2da2e61d2","Type":"ContainerStarted","Data":"6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.274494 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45" exitCode=0 Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.274588 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45"} Dec 03 07:27:41 crc kubenswrapper[4612]: E1203 07:27:41.297156 4612 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.310710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.310749 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.310761 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.310777 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.310787 4612 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.323067 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.351119 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.380252 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.391884 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.407491 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.415467 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.415523 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.415538 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.415564 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.415581 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.424468 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.444841 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/ho
st/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.460057 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.475937 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.487731 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.504641 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.518785 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.518837 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.518853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.518871 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.518885 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.520216 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.537239 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.552461 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.567009 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.587725 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z 
is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.600992 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.618990 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.620910 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.620979 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.620994 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.621023 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.621048 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.631577 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.646962 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d3
76dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.660817 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.678989 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.693385 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.705985 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:41Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.723845 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.723885 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.723894 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.723908 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.723916 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.826708 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.826769 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.826780 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.826807 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.826837 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.929969 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.930414 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.930427 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.930445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:41 crc kubenswrapper[4612]: I1203 07:27:41.930458 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:41Z","lastTransitionTime":"2025-12-03T07:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.034044 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.034473 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.034554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.034684 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.034795 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.150207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.150248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.150257 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.150272 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.150284 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.252655 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.252703 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.252712 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.252729 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.252739 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.281892 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.282973 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.283039 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.288761 4612 generic.go:334] "Generic (PLEG): container finished" podID="7b8cd268-ef6d-4c13-a726-f7e6a9beec58" containerID="32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9" exitCode=0 Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.288856 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerDied","Data":"32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.304767 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.309519 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.310068 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.323844 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.337666 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.350643 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.354545 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.354579 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.354587 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.354603 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.354612 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.363076 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.384520 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768d
ea7f151c477076541bb97ade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.395797 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.407456 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.423512 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.436728 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.447124 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.456991 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.457035 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.457048 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.457064 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.457076 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.464209 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.480312 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.498898 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.510741 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.522556 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.535550 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.553216 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768d
ea7f151c477076541bb97ade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.559442 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.559476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.559490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.559507 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.559519 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.562613 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.574268 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.585293 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.598264 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.607024 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.620986 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.633117 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.644307 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.653933 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.661303 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.661333 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.661341 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.661355 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.661363 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.666491 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:42Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.764343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 
07:27:42.764812 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.764995 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.765187 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.765319 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.786663 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.786994 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.787080 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.787177 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787435 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.787395964 +0000 UTC m=+33.960753404 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787542 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787566 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787670 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.787648822 +0000 UTC m=+33.961006262 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787742 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787772 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.787827 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.787807216 +0000 UTC m=+33.961164626 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.788180 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.789076 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.789039201 +0000 UTC m=+33.962396651 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.869105 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.869170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.869191 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.869239 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.869276 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.888929 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.889140 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.889165 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.889184 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:42 crc kubenswrapper[4612]: E1203 07:27:42.889261 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.889239374 +0000 UTC m=+34.062596814 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.973523 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.973771 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.973891 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.974023 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:42 crc kubenswrapper[4612]: I1203 07:27:42.974116 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:42Z","lastTransitionTime":"2025-12-03T07:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.076792 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.076888 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.076905 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.076934 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.076991 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.089109 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:43 crc kubenswrapper[4612]: E1203 07:27:43.089211 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.089541 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:43 crc kubenswrapper[4612]: E1203 07:27:43.089618 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.089758 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:43 crc kubenswrapper[4612]: E1203 07:27:43.090190 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.178868 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.178927 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.178975 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.179004 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.179022 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.281290 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.281319 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.281328 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.281349 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.281358 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.295181 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" event={"ID":"7b8cd268-ef6d-4c13-a726-f7e6a9beec58","Type":"ContainerStarted","Data":"be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.295284 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.306645 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.320005 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.332056 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.345301 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.358350 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.374458 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.383349 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 
03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.383445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.383476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.383492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.383503 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.385559 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.399695 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.412769 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.427633 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.438596 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.457015 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.467817 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.479405 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:43Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.486340 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.486378 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.486386 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.486400 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.486409 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.589988 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.590034 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.590045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.590061 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.590072 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.692474 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.692505 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.692517 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.692532 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.692543 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.794507 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.794550 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.794565 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.794586 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.794604 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.897090 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.897142 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.897154 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.897171 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.897185 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.999807 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.999843 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.999852 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:43 crc kubenswrapper[4612]: I1203 07:27:43.999865 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:43.999874 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:43Z","lastTransitionTime":"2025-12-03T07:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.101961 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.101999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.102010 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.102025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.102036 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.205226 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.205266 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.205275 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.205302 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.205311 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.298262 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.307981 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.308031 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.308047 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.308069 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.308084 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.409857 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.409917 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.409936 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.409986 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.410001 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.512410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.512445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.512455 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.512472 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.512483 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.616053 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.616103 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.616119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.616142 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.616158 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.719488 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.719526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.719539 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.719557 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.719569 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.822304 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.822344 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.822355 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.822369 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.822380 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.925492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.925533 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.925545 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.925562 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:44 crc kubenswrapper[4612]: I1203 07:27:44.925576 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:44Z","lastTransitionTime":"2025-12-03T07:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.028849 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.029234 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.029406 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.029601 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.029742 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.088973 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.089021 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.088981 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:45 crc kubenswrapper[4612]: E1203 07:27:45.089130 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:45 crc kubenswrapper[4612]: E1203 07:27:45.089355 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:45 crc kubenswrapper[4612]: E1203 07:27:45.089507 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.133073 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.133128 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.133146 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.133175 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.133193 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.236449 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.236508 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.236526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.236551 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.236570 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.303035 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/0.log" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.306173 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade" exitCode=1 Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.306212 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.306887 4612 scope.go:117] "RemoveContainer" containerID="28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.324830 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.338704 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.339023 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.339157 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.339295 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.339389 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.346355 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.361577 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.383733 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.395135 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.405894 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.443079 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.443490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.443646 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.443733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.443808 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.458775 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.481422 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.499162 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.518987 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.540677 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.546863 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.546902 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.546916 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.546935 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.546968 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.552711 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.566218 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.578460 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:45Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.651000 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.651046 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.651057 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.651084 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.651098 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.753475 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.753540 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.753556 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.753581 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.753597 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.855217 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.855250 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.855260 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.855277 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.855286 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.957044 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.957093 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.957105 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.957120 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:45 crc kubenswrapper[4612]: I1203 07:27:45.957130 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:45Z","lastTransitionTime":"2025-12-03T07:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.058897 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.058927 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.058935 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.058972 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.058983 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.162566 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.162626 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.162643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.162666 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.162686 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.265510 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.265557 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.265573 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.265596 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.265616 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.312057 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/0.log" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.315395 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.315510 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.329627 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.341511 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.354730 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.367370 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.367404 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.367414 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.367431 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.367444 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.372072 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.381707 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.392998 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.406678 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.419666 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.437664 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.449442 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.461103 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.470174 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.470225 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.470242 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.470262 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.470274 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.473585 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.485321 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.500205 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:46Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.572397 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.572436 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.572446 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.572462 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.572473 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.674751 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.674805 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.674817 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.674839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.674852 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777785 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777845 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777878 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777888 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.777888 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.880501 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.880562 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.880577 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.880604 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.880621 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.983834 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.984224 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.984359 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.984446 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:46 crc kubenswrapper[4612]: I1203 07:27:46.984522 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:46Z","lastTransitionTime":"2025-12-03T07:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.087239 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.087285 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.087295 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.087312 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
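
The status_manager.go:875 failures that recur throughout this section all trace to a single TLS error: every pod-status patch is rejected because the pod.network-node-identity.openshift.io webhook's serving certificate at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the node's current clock of 2025-12-03. A minimal diagnostic sketch follows; it is assumed tooling (not part of OpenShift), with only the endpoint address taken from the log, that dials the webhook and prints the certificate's validity window.

// certprobe.go - illustrative probe for the expired webhook certificate.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	// Address taken from the failing webhook URL in the log.
	addr := "127.0.0.1:9743"

	// Verification is skipped on purpose: the point is to inspect the
	// expired certificate, which a verifying handshake rejects outright
	// with exactly the x509 error seen above.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Printf("dial %s: %v\n", addr, err)
		os.Exit(1)
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		os.Exit(1)
	}
	cert := certs[0]
	now := time.Now().UTC()
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if now.After(cert.NotAfter) {
		// Matches the log: "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Println("certificate has expired")
		os.Exit(1)
	}
	fmt.Println("certificate is currently valid")
}
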
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.087323 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.092679 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.092749 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.092856 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.093493 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.094826 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.095011 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.111141 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.131292 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.142213 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.160278 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.189962 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.190011 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.190022 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.190044 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.190059 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.192653 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.214937 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.235728 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.262006 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.275380 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.286667 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.291749 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.291888 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.291964 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.292032 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.292094 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.298662 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.311294 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.320235 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/1.log" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.320870 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/0.log" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.323198 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513" exitCode=1 Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.323233 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.323278 4612 scope.go:117] "RemoveContainer" containerID="28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.324164 4612 scope.go:117] "RemoveContainer" containerID="d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513" Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.324363 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.327169 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.341248 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.352926 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.365675 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.382377 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394166 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394747 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394774 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394785 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394799 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.394808 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.413778 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.428040 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.439311 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.450806 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.463063 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.476033 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.488963 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.497468 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.497507 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.497519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.497537 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.497549 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.500823 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-
socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.516638 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not 
add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContaine
rStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.526041 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.600445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.600527 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.600551 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.600577 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.600637 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.649618 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.649661 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.649687 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.649707 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.649720 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.662262 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.668025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.668092 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.668117 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.668150 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.668172 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.683111 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.947147 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.947207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.947224 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.947250 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.947270 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.963702 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.968215 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.968242 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.968252 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.968265 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.968274 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:47 crc kubenswrapper[4612]: E1203 07:27:47.984178 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the previous patch attempt above; verbatim duplicate elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:47Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.988860 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
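Both node-status patch attempts above abort at the same point: the node.network-node-identity.openshift.io webhook on https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-03. A minimal Go sketch for confirming that from the node itself (the address is taken from the log; InsecureSkipVerify is set only so the handshake completes despite the expiry):

    // certcheck.go - print the validity window of the certificate served
    // by the webhook endpoint seen in the kubelet errors above.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // read the cert, do not trust it
        })
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("subject:  ", cert.Subject)
        fmt.Println("notBefore:", cert.NotBefore.UTC().Format(time.RFC3339))
        fmt.Println("notAfter: ", cert.NotAfter.UTC().Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            fmt.Println("certificate is expired, matching the x509 error above")
        }
    }

On this node it should report notAfter: 2025-08-24T17:21:41Z, the exact bound quoted in the x509 error.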
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.988904 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.988914 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.988934 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:47 crc kubenswrapper[4612]: I1203 07:27:47.988964 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:47Z","lastTransitionTime":"2025-12-03T07:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: E1203 07:27:48.004775 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the first patch attempt above; verbatim duplicate elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:48 crc kubenswrapper[4612]: E1203 07:27:48.004886 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
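The "exceeds retry count" line above is the kubelet giving up on one status-sync cycle after several consecutive failures; kubelet_node_status.go retries the PATCH a fixed number of times per cycle. A sketch of that bounded-retry shape (the limit of 5 mirrors the kubelet's nodeStatusUpdateRetry constant, an assumption based on the upstream source rather than anything visible in this log):

    // retryloop.go - sketch of the bounded retry behind
    // "Error updating node status, will retry" / "exceeds retry count".
    package main

    import (
        "errors"
        "fmt"
    )

    const nodeStatusUpdateRetry = 5 // assumed, mirroring the kubelet constant

    // Stand-in for the real PATCH: in the log every attempt fails the same
    // way because the admission webhook's serving certificate is expired.
    func tryUpdateNodeStatus() error {
        return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": x509: certificate has expired`)
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            err := tryUpdateNodeStatus()
            if err == nil {
                return nil
            }
            fmt.Println("Error updating node status, will retry:", err)
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }

Each new sync cycle starts the count over, which is consistent with the identical error storm repeating every few hundred milliseconds below.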
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.006396 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.006438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.006454 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.006474 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.006496 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.109348 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.109416 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.109430 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.109444 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.109476 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.212005 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.212041 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.212052 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.212070 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.212084 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.315194 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.315251 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.315269 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.315293 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.315311 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.333937 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/1.log" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.418330 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.418380 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.418396 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.418417 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.418434 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.521601 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.522067 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.522275 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.522452 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.522662 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.626273 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.626599 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.626780 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.626919 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.627119 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.730170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.730228 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.730246 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.730269 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
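Every NodeNotReady record in this stretch carries the same condition payload: the Ready condition is False because /etc/kubernetes/cni/net.d/ contains no CNI configuration file. A small Go sketch of that directory check (the path comes from the log; treating .conf, .conflist and .json as config files is an assumption about the runtime's matching rules):

    // cnicheck.go - look for CNI network configs in the directory the
    // kubelet errors above complain about.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        found := 0
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // assumed extension set
                fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
                found++
            }
        }
        if found == 0 {
            fmt.Println("no CNI configuration file in", dir, "- the node stays NotReady")
        }
    }

Here the directory presumably stays empty because ovnkube-node, whose containers appear later in this log, has not finished writing its network config yet.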
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.730286 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.832821 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.833262 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.833457 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.833656 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.833844 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.908513 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276"]
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.908919 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.912965 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.916291 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
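From the record below onward the same expired certificate also blocks pod status patches, via the sibling pod.network-node-identity.openshift.io webhook. When digesting a log like this, it helps to tally the webhook failures by name; a sketch that reads a kubelet log on stdin (the regexp is an assumption tuned to the escaped quoting in these lines):

    // webhooktally.go - count "failed calling webhook" errors per webhook
    // name in a kubelet log supplied on stdin.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    func main() {
        // Matches both plain and backslash-escaped quotes around the name.
        re := regexp.MustCompile(`failed calling webhook \\*"([^"\\]+)\\*"`)
        counts := map[string]int{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024) // journal lines are long
        for sc.Scan() {
            for _, m := range re.FindAllStringSubmatch(sc.Text(), -1) {
                counts[m[1]]++
            }
        }
        if err := sc.Err(); err != nil {
            fmt.Fprintln(os.Stderr, "scan:", err)
        }
        for name, n := range counts {
            fmt.Printf("%6d %s\n", n, name)
        }
    }

Run as, for example, go run webhooktally.go < kubelet.log; on this log it separates the node-webhook failures above from the pod-webhook failures that follow.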
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.926200 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.936532 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.936584 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.936593 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.936610 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.936637 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:48Z","lastTransitionTime":"2025-12-03T07:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.941865 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.948665 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkflz\" (UniqueName: \"kubernetes.io/projected/020a82cd-f275-41b6-af72-0de61a31fc0b-kube-api-access-dkflz\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.948779 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.948930 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.949034 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/020a82cd-f275-41b6-af72-0de61a31fc0b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.957638 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not 
add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContaine
rStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.966963 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.976958 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:48 crc kubenswrapper[4612]: I1203 07:27:48.990834 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:48Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.003854 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.017399 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.029591 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.039789 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.039837 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.039848 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.039861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.039870 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.045600 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.049686 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkflz\" (UniqueName: \"kubernetes.io/projected/020a82cd-f275-41b6-af72-0de61a31fc0b-kube-api-access-dkflz\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.049767 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.049792 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.049816 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/020a82cd-f275-41b6-af72-0de61a31fc0b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.050758 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.051017 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/020a82cd-f275-41b6-af72-0de61a31fc0b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: 
\"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.056480 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/020a82cd-f275-41b6-af72-0de61a31fc0b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.059204 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.069005 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkflz\" (UniqueName: \"kubernetes.io/projected/020a82cd-f275-41b6-af72-0de61a31fc0b-kube-api-access-dkflz\") pod \"ovnkube-control-plane-749d76644c-lg276\" (UID: \"020a82cd-f275-41b6-af72-0de61a31fc0b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.073025 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.087867 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.088926 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.089001 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:49 crc kubenswrapper[4612]: E1203 07:27:49.089057 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.089119 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:49 crc kubenswrapper[4612]: E1203 07:27:49.089255 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:49 crc kubenswrapper[4612]: E1203 07:27:49.089116 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.102082 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.112104 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.141909 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.142181 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.142266 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.142354 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.142430 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.220407 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" Dec 03 07:27:49 crc kubenswrapper[4612]: W1203 07:27:49.232681 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod020a82cd_f275_41b6_af72_0de61a31fc0b.slice/crio-7908dd3f2eff4d0f55afc021bf1a96087ac9262637ac704a0aa5c285d16b24bc WatchSource:0}: Error finding container 7908dd3f2eff4d0f55afc021bf1a96087ac9262637ac704a0aa5c285d16b24bc: Status 404 returned error can't find the container with id 7908dd3f2eff4d0f55afc021bf1a96087ac9262637ac704a0aa5c285d16b24bc Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.244407 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.244599 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.244733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.244853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.244983 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.345569 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" event={"ID":"020a82cd-f275-41b6-af72-0de61a31fc0b","Type":"ContainerStarted","Data":"7908dd3f2eff4d0f55afc021bf1a96087ac9262637ac704a0aa5c285d16b24bc"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.347388 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.347410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.347424 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.347434 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.347444 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.449357 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.449399 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.449410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.449426 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.449437 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.550765 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.550791 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.550801 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.550830 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.550838 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.653502 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.653548 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.653560 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.653575 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.653585 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.756486 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.756511 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.756519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.756532 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.756542 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.859210 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.859265 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.859276 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.859294 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.859305 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.962310 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.962341 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.962349 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.962362 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.962371 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:49Z","lastTransitionTime":"2025-12-03T07:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.984834 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-8m4gl"] Dec 03 07:27:49 crc kubenswrapper[4612]: I1203 07:27:49.985337 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:49 crc kubenswrapper[4612]: E1203 07:27:49.985399 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.001567 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:49Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.017285 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.032635 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.048625 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.059025 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.059090 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl4t8\" (UniqueName: \"kubernetes.io/projected/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-kube-api-access-gl4t8\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.065632 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.065688 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.065699 
4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.065713 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.065722 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.067294 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.080011 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.094483 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.109842 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.122277 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.135740 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.155622 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not 
add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContaine
rStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.159668 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl4t8\" (UniqueName: \"kubernetes.io/projected/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-kube-api-access-gl4t8\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.159743 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.159839 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.159909 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:27:50.65989349 +0000 UTC m=+33.833250890 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.168295 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.168332 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.168342 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.168357 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.168367 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.169451 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.177858 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl4t8\" (UniqueName: \"kubernetes.io/projected/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-kube-api-access-gl4t8\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.180872 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.193938 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.208835 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.223026 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.271874 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.272726 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.272743 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.272769 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.272787 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.351465 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" event={"ID":"020a82cd-f275-41b6-af72-0de61a31fc0b","Type":"ContainerStarted","Data":"69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.351507 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" event={"ID":"020a82cd-f275-41b6-af72-0de61a31fc0b","Type":"ContainerStarted","Data":"ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.364124 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.375660 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.375967 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.376105 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.376262 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.376390 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.377933 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.393589 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.405116 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.415554 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.428576 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.438963 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.450247 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.464031 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.478329 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.478920 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.479017 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.479032 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.479054 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.479067 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.492851 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.504880 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.518049 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.534351 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28df06150911b711ac861f421bcfb452ad2c768dea7f151c477076541bb97ade\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:44Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389256 5832 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 07:27:44.389462 5832 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.389747 5832 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 07:27:44.389983 5832 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390136 5832 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 07:27:44.390748 5832 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 07:27:44.390796 5832 factory.go:656] Stopping watch factory\\\\nI1203 07:27:44.390813 5832 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.543005 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.554475 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:50Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.581801 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.581841 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.581851 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.581868 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.581878 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.665970 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.666171 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.666268 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:27:51.666244766 +0000 UTC m=+34.839602186 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.683658 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.683702 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.683715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.683735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.683750 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.786229 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.786274 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.786287 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.786305 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.786318 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.867192 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.867312 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.867342 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867369 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:28:06.867351211 +0000 UTC m=+50.040708611 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.867395 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867418 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867461 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:06.867450114 +0000 UTC m=+50.040807534 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867518 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867556 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:06.867546726 +0000 UTC m=+50.040904126 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867695 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867754 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867778 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.867870 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:06.867844594 +0000 UTC m=+50.041202024 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.889724 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.889797 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.889819 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.889850 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.889871 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.968442 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.968648 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.968683 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.968695 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:50 crc kubenswrapper[4612]: E1203 07:27:50.968755 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:06.968738906 +0000 UTC m=+50.142096306 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.993012 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.993058 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.993071 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.993096 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:50 crc kubenswrapper[4612]: I1203 07:27:50.993121 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:50Z","lastTransitionTime":"2025-12-03T07:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.089357 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.089399 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.089399 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.089488 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.089691 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.089815 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.095743 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.095771 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.095783 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.095798 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.095809 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.198446 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.198481 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.198492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.198507 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.198518 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.301514 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.301559 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.301569 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.301588 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.301605 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.404466 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.405151 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.405292 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.405492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.405613 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.508387 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.508432 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.508443 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.508459 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.508471 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.599673 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.600738 4612 scope.go:117] "RemoveContainer" containerID="d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513" Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.600985 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.611136 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.611185 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.611203 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.611273 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.611337 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.612316 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.625698 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.639288 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.660076 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.670927 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.675727 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.676093 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:51 crc kubenswrapper[4612]: E1203 07:27:51.676178 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:27:53.676162137 +0000 UTC m=+36.849519537 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.684106 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 
07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.696338 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.706883 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.714045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.714087 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.714101 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.714119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.714133 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.720170 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.729421 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.738307 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.748137 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.761783 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.772394 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.782959 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.795212 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:51Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.816520 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.816553 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.816562 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.816574 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.816583 4612 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.918574 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.918628 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.918646 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.918663 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:51 crc kubenswrapper[4612]: I1203 07:27:51.918674 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:51Z","lastTransitionTime":"2025-12-03T07:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.021561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.021615 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.021631 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.021654 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.021672 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.046507 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.070619 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.089437 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:52 crc kubenswrapper[4612]: E1203 07:27:52.089678 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.092477 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.116692 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.123741 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.123786 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc 
kubenswrapper[4612]: I1203 07:27:52.123795 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.123809 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.123820 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.130922 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.143681 4612 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.163967 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.178016 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.189828 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.202310 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.217554 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.225918 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.226188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.226286 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.226391 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.226491 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.230542 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.246303 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.259612 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.275690 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.286738 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.297963 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:52Z is after 2025-08-24T17:21:41Z" Dec 03 
07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.328875 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.328980 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.328993 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.329009 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.329020 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.431928 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.431999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.432011 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.432027 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.432039 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.534395 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.534442 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.534458 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.534476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.534489 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.637311 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.637358 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.637368 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.637383 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.637395 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.740670 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.740711 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.740720 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.740732 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.740741 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.843438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.843498 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.843522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.843556 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.843576 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.946738 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.947143 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.947312 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.947471 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:52 crc kubenswrapper[4612]: I1203 07:27:52.947619 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:52Z","lastTransitionTime":"2025-12-03T07:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.050600 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.050889 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.051305 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.051599 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.051941 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.089225 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:53 crc kubenswrapper[4612]: E1203 07:27:53.089389 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.089518 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:53 crc kubenswrapper[4612]: E1203 07:27:53.089688 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.089991 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:53 crc kubenswrapper[4612]: E1203 07:27:53.090071 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.155824 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.155870 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.155883 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.155900 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.155913 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.259068 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.259127 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.259144 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.259166 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.259186 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.361596 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.361637 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.361645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.361658 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.361667 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.465177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.465250 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.465273 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.465304 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.465330 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.567797 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.567839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.567850 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.567865 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.567877 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.670641 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.670677 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.670707 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.670722 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.670733 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.696459 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:53 crc kubenswrapper[4612]: E1203 07:27:53.696608 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:53 crc kubenswrapper[4612]: E1203 07:27:53.696668 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:27:57.696651602 +0000 UTC m=+40.870009012 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.772429 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.772478 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.772492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.772512 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.772527 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.874667 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.874700 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.874710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.874725 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.874736 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.977489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.977525 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.977533 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.977546 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:53 crc kubenswrapper[4612]: I1203 07:27:53.977556 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:53Z","lastTransitionTime":"2025-12-03T07:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.079931 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.079974 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.079982 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.079994 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.080003 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:54Z","lastTransitionTime":"2025-12-03T07:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.088377 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:27:54 crc kubenswrapper[4612]: E1203 07:27:54.088478 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.182641 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.182728 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.182751 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.182784 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.182816 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:54Z","lastTransitionTime":"2025-12-03T07:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.285056 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.285126 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.285143 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.285165 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.285182 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:54Z","lastTransitionTime":"2025-12-03T07:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.387871 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.387907 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.387916 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.387930 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:54 crc kubenswrapper[4612]: I1203 07:27:54.387957 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:54Z","lastTransitionTime":"2025-12-03T07:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... 6 further identical node-status blocks ("Recording event message for node" x4 + "Node became not ready"), logged at ~100 ms intervals from 07:27:54.490 through 07:27:55.007, omitted ...]
Dec 03 07:27:55 crc kubenswrapper[4612]: I1203 07:27:55.088897 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:55 crc kubenswrapper[4612]: E1203 07:27:55.089507 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:27:55 crc kubenswrapper[4612]: I1203 07:27:55.089111 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:27:55 crc kubenswrapper[4612]: I1203 07:27:55.088930 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:27:55 crc kubenswrapper[4612]: E1203 07:27:55.090113 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:27:55 crc kubenswrapper[4612]: E1203 07:27:55.090333 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... 10 further identical node-status blocks, 07:27:55.113 through 07:27:56.041, omitted ...]
Dec 03 07:27:56 crc kubenswrapper[4612]: I1203 07:27:56.088689 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:27:56 crc kubenswrapper[4612]: E1203 07:27:56.088819 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
[... 10 further identical node-status blocks, 07:27:56.145 through 07:27:57.072, omitted ...]
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.088551 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.088600 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.088601 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:27:57 crc kubenswrapper[4612]: E1203 07:27:57.088771 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:57 crc kubenswrapper[4612]: E1203 07:27:57.089041 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:57 crc kubenswrapper[4612]: E1203 07:27:57.089191 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.112322 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.125626 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.138967 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.152660 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.170021 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.174312 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.174346 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.174355 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.174368 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.174377 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.198611 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f
5f949643b3105bb6a1069513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.209213 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.221600 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.234579 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.250408 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.267124 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.276113 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.276152 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.276163 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.276179 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.276190 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.281859 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.294423 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.305439 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.316039 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.326380 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:57Z is after 2025-08-24T17:21:41Z" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.378637 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.378680 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.378698 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.378715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.378727 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.481719 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.481784 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.481799 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.481816 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.481830 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.584907 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.584999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.585016 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.585040 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.585072 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.687732 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.687778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.687789 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.687806 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.687823 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.745824 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:27:57 crc kubenswrapper[4612]: E1203 07:27:57.746049 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 07:27:57 crc kubenswrapper[4612]: E1203 07:27:57.746143 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:28:05.746120348 +0000 UTC m=+48.919477778 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.791352 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.791426 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.791452 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.791500 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.791526 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.894125 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.894169 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.894179 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.894197 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.894208 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.997181 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.997225 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.997234 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.997250 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:57 crc kubenswrapper[4612]: I1203 07:27:57.997258 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:57Z","lastTransitionTime":"2025-12-03T07:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.025062 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.025146 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.025161 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.025177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
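The "durationBeforeRetry 8s" in the nestedpendingoperations failure above is kubelet's per-volume exponential backoff: each consecutive MountVolume failure roughly doubles the wait before the next attempt. A minimal sketch of that doubling policy, assuming the kubelet-style defaults of a 500ms initial delay and a 2m2s cap (those two constants are assumptions, not values read from this log):

    from datetime import datetime, timedelta

    # Assumed kubelet-style exponential backoff: double the delay on every
    # consecutive failure, starting at 500ms and capped at 2m2s.
    INITIAL_DELAY = timedelta(milliseconds=500)
    MAX_DELAY = timedelta(minutes=2, seconds=2)

    def duration_before_retry(consecutive_failures: int) -> timedelta:
        delay = INITIAL_DELAY * (2 ** max(consecutive_failures - 1, 0))
        return min(delay, MAX_DELAY)

    # An 8s delay corresponds to the 5th consecutive failure:
    # 0.5s -> 1s -> 2s -> 4s -> 8s
    assert duration_before_retry(5) == timedelta(seconds=8)
    failed_at = datetime.fromisoformat("2025-12-03 07:27:57.746143")
    print("no retries permitted until", failed_at + duration_before_retry(5))

Under those assumptions the computed retry time lands at 07:28:05, matching the "No retries permitted until 2025-12-03 07:28:05" in the entry above.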
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.025189 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:58 crc kubenswrapper[4612]: E1203 07:27:58.053848 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:58Z is after 
2025-08-24T17:21:41Z"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.059756 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.059812 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.059829 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.059853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.059870 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.087306 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.087345 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.087354 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.087368 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.087379 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.088724 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:27:58 crc kubenswrapper[4612]: E1203 07:27:58.088994 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
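Every NetworkReady=false condition above, and the "Error syncing pod, skipping" for network-metrics-daemon-8m4gl, traces back to the same gate: the container runtime found no CNI configuration, so kubelet refuses to create pod sandboxes until a network plugin writes one. A minimal sketch of that presence check, assuming the common CNI convention that config files end in .conf, .conflist or .json (the extension list and helper are illustrative, not kubelet code):

    import os

    # Assumed CNI convention: the runtime looks for *.conf, *.conflist or
    # *.json files in the CNI config directory; none present means
    # NetworkReady=false and pod sandboxes cannot be created.
    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"

    def network_ready(conf_dir: str = CNI_CONF_DIR) -> bool:
        try:
            entries = os.listdir(conf_dir)
        except FileNotFoundError:
            return False
        return any(e.endswith((".conf", ".conflist", ".json")) for e in entries)

    if not network_ready():
        print("no CNI configuration file in %s. Has your network provider started?" % CNI_CONF_DIR)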
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.113736 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.113762 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.113770 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.113787 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.113799 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
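The node-status patch failures above are not transient API hiccups: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node's clock (2025-12-03T07:27:58Z), so every patch is rejected until the certificate is rotated. A minimal sketch of the same validity-window check, with the two timestamps taken from the log (the NotBefore value and the helper itself are illustrative assumptions, not kubelet or Go crypto/x509 code):

    from datetime import datetime, timezone

    def check_validity(now: datetime, not_before: datetime, not_after: datetime) -> str:
        # Mirrors the x509 validity-window check: a certificate is valid
        # only when not_before <= now <= not_after.
        if now > not_after:
            return f"expired: current time {now:%Y-%m-%dT%H:%M:%SZ} is after {not_after:%Y-%m-%dT%H:%M:%SZ}"
        if now < not_before:
            return "not yet valid"
        return "valid"

    now = datetime(2025, 12, 3, 7, 27, 58, tzinfo=timezone.utc)
    not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)
    not_before = datetime(2024, 1, 1, tzinfo=timezone.utc)  # assumed placeholder
    print(check_validity(now, not_before, not_after))

Run as written, this prints the same verdict the TLS handshake produced: expired, current time 2025-12-03T07:27:58Z is after 2025-08-24T17:21:41Z.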
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.131852 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.131891 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.131903 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.131920 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.131934 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:27:58 crc kubenswrapper[4612]: E1203 07:27:58.143518 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:27:58Z is after 
2025-08-24T17:21:41Z" Dec 03 07:27:58 crc kubenswrapper[4612]: E1203 07:27:58.143646 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.145153 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.145176 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.145183 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.145195 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.145204 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.247691 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.247735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.247752 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.247770 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.247784 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.350888 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.351025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.351054 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.351082 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.351100 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.453838 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.453900 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.453974 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.454008 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.454026 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.556731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.556784 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.556800 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.556823 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.556839 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.660057 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.660125 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.660150 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.660179 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.660204 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.764267 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.764346 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.764363 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.764388 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.764406 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.867557 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.867615 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.867632 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.867655 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.867672 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.970709 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.970787 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.970825 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.970853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:58 crc kubenswrapper[4612]: I1203 07:27:58.970874 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:58Z","lastTransitionTime":"2025-12-03T07:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.074590 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.074663 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.074689 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.074718 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.074739 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.089210 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.089261 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:27:59 crc kubenswrapper[4612]: E1203 07:27:59.089461 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.089510 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:27:59 crc kubenswrapper[4612]: E1203 07:27:59.089708 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:27:59 crc kubenswrapper[4612]: E1203 07:27:59.089765 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.177643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.177698 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.177719 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.177748 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.177770 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.280430 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.280556 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.280572 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.280599 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.280616 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.383589 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.383644 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.383659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.383680 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.383697 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.486186 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.486236 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.486247 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.486264 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.486728 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.589759 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.589795 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.589805 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.589820 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.589831 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.692420 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.692476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.692493 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.692604 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.692628 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.795357 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.795393 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.795404 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.795420 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.795430 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.898597 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.898667 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.898686 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.898708 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:27:59 crc kubenswrapper[4612]: I1203 07:27:59.898724 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:27:59Z","lastTransitionTime":"2025-12-03T07:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.002103 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.002165 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.002187 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.002216 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.002242 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.088663 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:00 crc kubenswrapper[4612]: E1203 07:28:00.088899 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.105674 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.105735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.105934 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.105997 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.106022 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.209363 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.209417 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.209434 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.209458 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.209476 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.312216 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.312277 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.312302 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.312333 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.312354 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.414986 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.415033 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.415058 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.415085 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.415106 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.518126 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.518197 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.518218 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.518244 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.518262 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.620313 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.620358 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.620367 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.620381 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.620390 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.723013 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.723077 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.723097 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.723125 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.723143 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.827137 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.827171 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.827182 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.827198 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.827211 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.929908 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.930002 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.930020 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.930045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:00 crc kubenswrapper[4612]: I1203 07:28:00.930064 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:00Z","lastTransitionTime":"2025-12-03T07:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.032919 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.033025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.033048 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.033083 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.033106 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.088524 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.088587 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:01 crc kubenswrapper[4612]: E1203 07:28:01.088695 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.088766 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:01 crc kubenswrapper[4612]: E1203 07:28:01.089129 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:01 crc kubenswrapper[4612]: E1203 07:28:01.089233 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.136848 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.136906 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.136926 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.137051 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.137100 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.240645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.240963 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.241035 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.241105 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.241187 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.343999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.344038 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.344078 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.344095 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.344106 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.447422 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.447524 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.447541 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.447561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.447574 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.550122 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.550170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.550186 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.550207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.550222 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.652341 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.652380 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.652389 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.652405 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.652417 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.755476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.755572 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.755590 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.755612 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.755630 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.858521 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.858573 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.858585 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.858604 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.858615 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.961681 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.961746 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.961769 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.961799 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:01 crc kubenswrapper[4612]: I1203 07:28:01.961823 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:01Z","lastTransitionTime":"2025-12-03T07:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.064812 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.064911 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.065015 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.065044 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.065064 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.088359 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:02 crc kubenswrapper[4612]: E1203 07:28:02.088460 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.168659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.168710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.168738 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.168765 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.168784 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.271171 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.271220 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.271237 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.271259 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.271273 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.374014 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.374078 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.374161 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.374203 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.374225 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.477526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.477591 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.477602 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.477617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.477628 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.628504 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.628545 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.628556 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.628573 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.628587 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.731800 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.731863 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.731882 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.731908 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.731927 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.835075 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.835749 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.836061 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.836214 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.836348 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.939606 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.940088 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.940210 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.940307 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:02 crc kubenswrapper[4612]: I1203 07:28:02.940470 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:02Z","lastTransitionTime":"2025-12-03T07:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.044233 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.044287 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.044304 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.044328 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.044347 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.089249 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.089281 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:03 crc kubenswrapper[4612]: E1203 07:28:03.089371 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:03 crc kubenswrapper[4612]: E1203 07:28:03.089964 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.090069 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:03 crc kubenswrapper[4612]: E1203 07:28:03.090520 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.090989 4612 scope.go:117] "RemoveContainer" containerID="d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.147676 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.148051 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.148070 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.148094 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.148110 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.250683 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.250729 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.250743 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.250762 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.250775 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.352724 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.352797 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.352811 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.352860 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.352877 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.396076 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/1.log" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.399010 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.400109 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.413209 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.428923 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 
07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.443418 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.455496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.455526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.455534 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.455549 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.455558 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.461037 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.477258 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.500663 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.515440 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.533714 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.545648 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.558110 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.558157 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.558168 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.558185 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.558196 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.562697 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.571582 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.591957 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.604423 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.615065 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.628602 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.643805 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.660853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.660885 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.660896 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.660912 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.660922 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.763365 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.763413 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.763427 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.763445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.763456 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.865691 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.865728 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.865737 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.865750 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.865758 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.968406 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.968462 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.968473 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.968490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:03 crc kubenswrapper[4612]: I1203 07:28:03.968499 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:03Z","lastTransitionTime":"2025-12-03T07:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.071343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.071384 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.071395 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.071419 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.071430 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.088726 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:04 crc kubenswrapper[4612]: E1203 07:28:04.088975 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.173878 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.173935 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.173969 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.174008 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.174024 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.277155 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.277212 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.277223 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.277241 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.277252 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.380478 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.380535 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.380549 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.380572 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.380587 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.405162 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/2.log" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.406076 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/1.log" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.409737 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" exitCode=1 Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.409819 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.409925 4612 scope.go:117] "RemoveContainer" containerID="d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.410616 4612 scope.go:117] "RemoveContainer" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" Dec 03 07:28:04 crc kubenswrapper[4612]: E1203 07:28:04.410812 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.424493 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.441762 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.460967 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.480474 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5c3eea64d97722c0d65d9e12bbf6a62c42ec41f5f949643b3105bb6a1069513\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:27:46Z\\\",\\\"message\\\":\\\"one reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 07:27:46.126233 5963 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126239 5963 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:27:46.126243 5963 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1203 07:27:46.126255 5963 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF1203 07:27:46.126256 5963 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitc
h\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.482520 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.482566 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.482579 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.482593 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.482603 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.493897 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.505875 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.519727 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.533680 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.549735 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.564619 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.576995 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.584490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.584519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.584529 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.584542 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.584551 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.591458 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.604240 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.616518 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.629290 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.643158 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:04Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.686920 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.686976 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.686988 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.687003 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.687013 4612 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.790673 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.790729 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.790749 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.790776 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.790798 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.894558 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.894606 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.894622 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.894643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.894660 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.997845 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.997910 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.997928 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.997986 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:04 crc kubenswrapper[4612]: I1203 07:28:04.998007 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:04Z","lastTransitionTime":"2025-12-03T07:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.089259 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.089259 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.089403 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.089621 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.089774 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.089915 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.106809 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.106879 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.106901 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.106929 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.106981 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.210424 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.210489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.210506 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.210529 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.210546 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.312862 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.312927 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.312974 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.312998 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.313017 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.416351 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.416550 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.416581 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.416613 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.416636 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.417239 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/2.log" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.424597 4612 scope.go:117] "RemoveContainer" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.424898 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.445822 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.468610 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.491183 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.506898 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.519284 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.519343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.519360 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.519383 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.519398 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.521533 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.536332 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.548905 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.560504 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.575224 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.586671 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.599251 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 
07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.614884 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.621902 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.621953 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.621968 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.621984 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.621995 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.629087 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.641147 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.660939 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for 
openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.674221 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:05Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.725342 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.725580 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.725673 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.725764 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.725847 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.763303 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.763518 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:05 crc kubenswrapper[4612]: E1203 07:28:05.763601 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:28:21.763579665 +0000 UTC m=+64.936937065 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.829230 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.829281 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.829298 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.829322 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.829339 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.932431 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.932567 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.932585 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.932608 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:05 crc kubenswrapper[4612]: I1203 07:28:05.932626 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:05Z","lastTransitionTime":"2025-12-03T07:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.035023 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.035084 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.035102 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.035126 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.035143 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.089277 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.089478 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.137854 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.137889 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.137898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.137911 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.137921 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.241585 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.241643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.241657 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.241677 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.241688 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.344226 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.344269 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.344278 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.344291 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.344301 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.447094 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.447128 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.447137 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.447152 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.447167 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.549891 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.549930 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.549959 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.549980 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.549992 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.652635 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.652714 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.652731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.652757 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.652802 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.756232 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.756591 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.756605 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.756621 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.756632 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.858462 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.858508 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.858522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.858537 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.858547 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.876113 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.876308 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.876354 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876425 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:28:38.876382608 +0000 UTC m=+82.049740058 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876457 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876515 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:38.876498831 +0000 UTC m=+82.049856261 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.876505 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876512 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876702 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876646 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876725 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876782 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:38.876768078 +0000 UTC m=+82.050125508 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.876805 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:38.876793879 +0000 UTC m=+82.050151309 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.961759 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.961806 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.961817 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.961832 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.961842 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:06Z","lastTransitionTime":"2025-12-03T07:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:06 crc kubenswrapper[4612]: I1203 07:28:06.978236 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.978409 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.978427 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.978440 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:06 crc kubenswrapper[4612]: E1203 07:28:06.978490 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:28:38.978475431 +0000 UTC m=+82.151832841 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.065033 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.065092 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.065114 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.065140 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.065161 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.088877 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.089002 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:07 crc kubenswrapper[4612]: E1203 07:28:07.089137 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.089216 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:07 crc kubenswrapper[4612]: E1203 07:28:07.089381 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:07 crc kubenswrapper[4612]: E1203 07:28:07.089462 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.101917 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.113931 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.127747 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.140631 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.152861 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.167177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.167245 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.167254 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.167265 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.167274 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.190089 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.203916 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.216359 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.231582 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-ce
rts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.246999 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.262274 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.269583 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.269649 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.269660 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.269676 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.269689 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.275572 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.286894 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.300742 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.312091 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.324754 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:07Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.371809 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.371855 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc 
kubenswrapper[4612]: I1203 07:28:07.371866 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.371883 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.371896 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.474968 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.475031 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.475041 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.475064 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.475076 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.578701 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.578758 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.578772 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.578793 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
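
Every "Failed to update status for pod" entry above has the same root cause: the serving certificate of the network-node-identity webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-03, so the kubelet's TLS handshake fails exactly as x509 validation dictates. A minimal Go sketch of confirming the certificate window from the node follows; the endpoint address is taken from the log, and verification is deliberately skipped so the expired certificate can still be read.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint from the kubelet log. InsecureSkipVerify lets us fetch the
	// certificate even though normal verification would reject it as expired.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
		cert.Subject, cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Mirrors the logged x509 error: current time is after NotAfter.
		fmt.Println("certificate has expired")
	}
}
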
Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.681679 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.681711 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.681721 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.681734 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.681743 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.784076 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.784106 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.784114 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.784129 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.784138 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.886838 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.886880 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.886889 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.886906 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.886915 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.990663 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.990722 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.990738 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.990783 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:07 crc kubenswrapper[4612]: I1203 07:28:07.990800 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:07Z","lastTransitionTime":"2025-12-03T07:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.089220 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.089372 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.093195 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.093239 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.093275 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.093292 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.093301 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.195558 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.195594 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.195605 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.195621 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.195632 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.298617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.298676 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.298694 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.298718 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.298737 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.402135 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.402187 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.402203 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.402223 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.402236 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.453798 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.453845 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.453860 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.453880 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.453896 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.469389 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:08Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.472883 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.472914 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.472923 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.472934 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.472958 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.485033 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:08Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.489777 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.490045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.490291 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.490481 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.490657 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.509130 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:08Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.513977 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.514185 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
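The setters.go:603 entries above print the Ready condition object the kubelet is about to report. As a point of reference, here is a minimal sketch of how such a condition is assembled with the standard Kubernetes API types (the k8s.io/api and k8s.io/apimachinery packages); the literal values are copied from the log entry, and this is an illustration, not kubelet code:

```go
package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	now := metav1.NewTime(time.Now())
	// Mirrors the condition printed by setters.go:603 in the log above:
	// Ready=False with reason KubeletNotReady until a CNI config appears.
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	fmt.Printf("%+v\n", cond)
}
```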
event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.514264 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.514367 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.514455 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.526105 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:08Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.530208 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.530300 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
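Every NotReady condition above carries the same root cause: no CNI configuration file in /etc/kubernetes/cni/net.d/. A rough, self-contained way to reproduce that check from outside the kubelet, assuming the directory named in the log and the file extensions CNI config loaders commonly accept (.conf, .conflist, .json); this is an illustration, not the kubelet's actual code path:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log above
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions CNI loaders commonly accept (assumption)
			fmt.Println("CNI config present:", e.Name())
			found = true
		}
	}
	if !found {
		// Matches the condition the kubelet keeps reporting above.
		fmt.Println("no CNI configuration file in", confDir)
	}
}
```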
event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.530318 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.530343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.530360 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.545936 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:08Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:08 crc kubenswrapper[4612]: E1203 07:28:08.546146 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.547595 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
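The retry sequence above ends with "update node status exceeds retry count": every patch attempt was rejected because the node.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24T17:21:41Z (in the kubelet source this retry budget is a small fixed constant, nodeStatusUpdateRetry). A small diagnostic sketch that performs the same expiry comparison the TLS handshake reports, pointed at the webhook address taken from the log; InsecureSkipVerify is set only so the handshake completes and the presented certificate can be inspected:

```go
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing webhook call logged above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the cert even though verification would fail
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("server presented no certificate")
		return
	}
	leaf := certs[0]
	now := time.Now()
	// The same comparison that yields "certificate has expired or is not yet valid".
	switch {
	case now.After(leaf.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), leaf.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(leaf.NotBefore):
		fmt.Println("not yet valid until", leaf.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate currently valid, expires", leaf.NotAfter.UTC().Format(time.RFC3339))
	}
}
```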
event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.547626 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.547636 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.547654 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.547666 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.650760 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.650811 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.650822 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.650838 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.650850 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.753170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.753241 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.753263 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.753291 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.753312 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.856408 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.856479 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.856491 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.856506 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.856517 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.958601 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.958651 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.958661 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.958677 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:08 crc kubenswrapper[4612]: I1203 07:28:08.958687 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:08Z","lastTransitionTime":"2025-12-03T07:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.061278 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.061318 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.061352 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.061369 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.061381 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.078464 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.088075 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.089522 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.089577 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.089531 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:09 crc kubenswrapper[4612]: E1203 07:28:09.089692 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:09 crc kubenswrapper[4612]: E1203 07:28:09.089795 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:09 crc kubenswrapper[4612]: E1203 07:28:09.089888 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.093293 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.104335 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.120252 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.133561 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.148746 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.163826 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.163858 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.163873 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.163888 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.163898 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.179099 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.190851 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.200470 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.210550 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.224047 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.240739 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb3
5026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.249724 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.258687 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.266331 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.266356 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.266366 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.266378 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.266387 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.271162 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.283229 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.294870 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:09Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.369668 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.369704 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.369715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.369730 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.369740 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.472204 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.472238 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.472246 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.472260 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.472268 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.574100 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.574163 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.574180 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.574206 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.574229 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.676855 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.676898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.676907 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.676922 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.676933 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.780287 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.780353 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.780376 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.780419 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.780445 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.884274 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.884381 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.884408 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.884455 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.884479 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.987765 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.988176 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.988336 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.988487 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:09 crc kubenswrapper[4612]: I1203 07:28:09.988637 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:09Z","lastTransitionTime":"2025-12-03T07:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.088513 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:28:10 crc kubenswrapper[4612]: E1203 07:28:10.089373 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.091751 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.091787 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.091796 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.091813 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.091825 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.194798 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.194863 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.194887 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.194917 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.194935 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.297886 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.298068 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.298104 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.298137 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.298156 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.400715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.400784 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.400801 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.400824 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.400841 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.503686 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.503724 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.503735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.503752 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.503764 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.606994 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.607062 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.607081 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.607104 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.607120 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.710446 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.710518 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.710533 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.710556 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.710571 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.813394 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.813432 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.813440 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.813455 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.813464 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.915953 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.915988 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.916007 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.916022 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:10 crc kubenswrapper[4612]: I1203 07:28:10.916045 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:10Z","lastTransitionTime":"2025-12-03T07:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.019100 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.019142 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.019154 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.019170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.019183 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.088440 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:28:11 crc kubenswrapper[4612]: E1203 07:28:11.088586 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.088801 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:28:11 crc kubenswrapper[4612]: E1203 07:28:11.088849 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.088994 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:28:11 crc kubenswrapper[4612]: E1203 07:28:11.089037 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.121471 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.121512 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.121522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.121543 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.121553 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.224213 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.224244 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.224253 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.224265 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.224276 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.327657 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.327705 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.327715 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.327733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.327749 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.430213 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.430260 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.430270 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.430283 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.430293 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.533550 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.533587 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.533598 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.533613 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.533624 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.637111 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.637157 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.637168 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.637188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.637201 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.739983 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.740025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.740036 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.740053 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.740064 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.843462 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.843519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.843541 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.843567 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.843590 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.946466 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.946609 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.946620 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.946636 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:11 crc kubenswrapper[4612]: I1203 07:28:11.946648 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:11Z","lastTransitionTime":"2025-12-03T07:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.048450 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.048501 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.048512 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.048526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.048537 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.088976 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:12 crc kubenswrapper[4612]: E1203 07:28:12.089131 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.151100 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.151201 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.151219 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.151244 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.151262 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.253986 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.254032 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.254043 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.254060 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.254072 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.356830 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.356868 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.356877 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.356891 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.356900 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.459348 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.459411 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.459421 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.459440 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.459453 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.562459 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.562506 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.562520 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.562538 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.562549 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.664463 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.664508 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.664519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.664539 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.664556 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.767046 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.767074 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.767083 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.767096 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.767113 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.869604 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.869651 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.869659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.869672 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.869681 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.972259 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.972305 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.972317 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.972335 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:12 crc kubenswrapper[4612]: I1203 07:28:12.972347 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:12Z","lastTransitionTime":"2025-12-03T07:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.075629 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.075677 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.075690 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.075707 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.075721 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.089478 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.089535 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:13 crc kubenswrapper[4612]: E1203 07:28:13.089614 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.089719 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:13 crc kubenswrapper[4612]: E1203 07:28:13.089836 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:13 crc kubenswrapper[4612]: E1203 07:28:13.090205 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.178313 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.178373 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.178447 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.178470 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.178486 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.281449 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.281492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.281504 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.281528 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.281542 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.384904 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.384938 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.384963 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.384977 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.384986 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.488155 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.488192 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.488202 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.488217 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.488228 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.591729 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.591785 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.591807 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.591838 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.591862 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.694399 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.694733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.694827 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.694917 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.695025 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.797809 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.797864 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.797882 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.797906 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.797923 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.900798 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.900896 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.900914 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.901000 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:13 crc kubenswrapper[4612]: I1203 07:28:13.901095 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:13Z","lastTransitionTime":"2025-12-03T07:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.003880 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.004476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.004582 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.004681 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.004769 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.089100 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:14 crc kubenswrapper[4612]: E1203 07:28:14.089255 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.107872 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.108688 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.109145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.109375 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.109537 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.212098 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.212173 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.212220 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.212237 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.212246 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.314640 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.314707 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.314725 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.314745 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.314789 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.417624 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.417683 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.417693 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.417710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.417722 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.521666 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.521748 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.521790 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.521811 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.521824 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.625622 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.625696 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.625709 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.625731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.625745 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.729250 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.729299 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.729311 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.729331 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.729342 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.832923 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.833438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.833610 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.833775 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.833923 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.938021 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.938398 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.938587 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.938744 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:14 crc kubenswrapper[4612]: I1203 07:28:14.938890 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:14Z","lastTransitionTime":"2025-12-03T07:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.042248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.042313 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.042330 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.042358 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.042378 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.089557 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.089615 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.089574 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:15 crc kubenswrapper[4612]: E1203 07:28:15.089770 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:15 crc kubenswrapper[4612]: E1203 07:28:15.089876 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:15 crc kubenswrapper[4612]: E1203 07:28:15.090070 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.145328 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.145818 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.145910 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.146214 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.146286 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.249564 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.249641 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.249664 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.249693 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.249717 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.352567 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.353003 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.353245 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.353444 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.353591 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.457701 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.458375 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.458809 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.459215 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.459577 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.562536 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.562617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.562631 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.562649 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.562662 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.665698 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.665744 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.665755 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.665774 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.665786 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.768831 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.768887 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.768901 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.768920 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.768935 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.871174 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.871223 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.871239 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.871263 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.871280 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.974218 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.974532 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.974651 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.974826 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:15 crc kubenswrapper[4612]: I1203 07:28:15.974979 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:15Z","lastTransitionTime":"2025-12-03T07:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.081410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.081584 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.081609 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.081640 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.081662 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.089411 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:16 crc kubenswrapper[4612]: E1203 07:28:16.089659 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.184664 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.184710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.184718 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.184730 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.184739 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.287840 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.287889 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.287905 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.287919 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.287960 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.391072 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.391160 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.391175 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.391191 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.391205 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.493803 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.493836 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.493847 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.493866 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.493877 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.596689 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.596788 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.596808 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.596832 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.596850 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.699393 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.699687 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.699927 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.700156 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.700339 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.802727 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.803077 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.803172 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.803262 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.803353 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.906288 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.906360 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.906376 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.906832 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:16 crc kubenswrapper[4612]: I1203 07:28:16.906886 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:16Z","lastTransitionTime":"2025-12-03T07:28:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.009617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.009659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.009670 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.009686 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.009736 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.088683 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.088741 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.088880 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:17 crc kubenswrapper[4612]: E1203 07:28:17.089329 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:17 crc kubenswrapper[4612]: E1203 07:28:17.089346 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:17 crc kubenswrapper[4612]: E1203 07:28:17.089528 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.101733 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.112243 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.112302 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.112314 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.112330 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.112341 4612 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.113326 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.126958 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.138177 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.150630 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.175479 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217134 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217161 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217172 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.217706 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.241590 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.252698 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.262921 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.278208 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d
15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for 
openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.287029 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.299307 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.310777 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.319610 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.319659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.319672 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.319689 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.319701 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.335538 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.348042 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.359564 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:17Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.421531 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.421769 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.421877 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.421987 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.422070 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.523771 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.524040 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.524321 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.524447 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.524516 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.627098 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.627144 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.627154 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.627170 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.627181 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.730281 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.730358 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.730380 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.730407 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.730430 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.833039 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.833093 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.833104 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.833120 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.833131 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.935332 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.935369 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.935377 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.935390 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:17 crc kubenswrapper[4612]: I1203 07:28:17.935400 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:17Z","lastTransitionTime":"2025-12-03T07:28:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.038214 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.038248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.038259 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.038276 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.038287 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.088647 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.088884 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.141080 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.141117 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.141130 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.141145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.141157 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.243423 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.244065 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.244093 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.244110 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.244123 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.346885 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.346925 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.346957 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.346977 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.346990 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.448804 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.448864 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.448931 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.449013 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.449067 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.551917 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.552040 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.552069 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.552097 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.552119 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.653930 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.653993 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.654004 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.654022 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.654034 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.757375 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.757638 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.757712 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.757803 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.757894 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.861225 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.861268 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.861278 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.861296 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.861306 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.895397 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.895789 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.895939 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.896114 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.896241 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.914353 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:18Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.918350 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.918394 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.918406 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.918423 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.918434 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.930372 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:18Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.933583 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.933608 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.933632 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.933645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.933653 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.945250 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:18Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.948642 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.948759 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.948837 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.948909 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.949020 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.961640 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:18Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.971671 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.972177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.972325 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.972457 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.972730 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.986485 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:18Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:18 crc kubenswrapper[4612]: E1203 07:28:18.986660 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.988775 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.988908 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.989039 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.989140 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:18 crc kubenswrapper[4612]: I1203 07:28:18.989249 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:18Z","lastTransitionTime":"2025-12-03T07:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.089400 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:19 crc kubenswrapper[4612]: E1203 07:28:19.089542 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.089409 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:19 crc kubenswrapper[4612]: E1203 07:28:19.089744 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.090292 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:19 crc kubenswrapper[4612]: E1203 07:28:19.091162 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.093121 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.093150 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.093162 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.093200 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.093213 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.196062 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.196097 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.196109 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.196124 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.196133 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.298260 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.298782 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.298854 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.298921 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.299001 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.402285 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.402339 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.402385 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.402408 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.402425 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.504788 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.504825 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.504834 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.504847 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.504855 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.607102 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.607155 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.607168 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.607185 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.607200 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.709293 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.709581 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.709682 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.709776 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.709900 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.812678 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.812981 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.813046 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.813132 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.813239 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.915346 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.915402 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.915415 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.915432 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:19 crc kubenswrapper[4612]: I1203 07:28:19.915444 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:19Z","lastTransitionTime":"2025-12-03T07:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.018038 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.018117 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.018132 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.018149 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.018161 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.089097 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:20 crc kubenswrapper[4612]: E1203 07:28:20.089229 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.090152 4612 scope.go:117] "RemoveContainer" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" Dec 03 07:28:20 crc kubenswrapper[4612]: E1203 07:28:20.090445 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.120386 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.120426 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.120438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.120453 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.120492 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.222795 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.222848 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.222858 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.222873 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.222886 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.325300 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.325331 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.325339 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.325350 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.325359 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.427740 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.427779 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.427788 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.427802 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.427811 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.530778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.530813 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.530821 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.530834 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.530843 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.633898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.633977 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.633989 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.634007 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.634022 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.736578 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.736631 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.736644 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.736659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.736669 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.838302 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.838338 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.838348 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.838395 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.838410 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.941438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.941478 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.941489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.941504 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:20 crc kubenswrapper[4612]: I1203 07:28:20.941514 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:20Z","lastTransitionTime":"2025-12-03T07:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.043822 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.044078 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.044177 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.044255 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.044318 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.088901 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:21 crc kubenswrapper[4612]: E1203 07:28:21.089032 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.089217 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:21 crc kubenswrapper[4612]: E1203 07:28:21.089275 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.089417 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:21 crc kubenswrapper[4612]: E1203 07:28:21.089492 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.146522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.146547 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.146554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.146566 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.146575 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.248898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.249188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.249314 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.249406 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.249507 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.352109 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.352749 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.352835 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.352937 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.353056 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.455778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.455839 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.455853 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.455881 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.455892 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.558135 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.558198 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.558405 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.558471 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.558492 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.661197 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.661227 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.661235 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.661248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.661257 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.762973 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.763024 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.763034 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.763048 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.763059 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.845845 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:21 crc kubenswrapper[4612]: E1203 07:28:21.846016 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:21 crc kubenswrapper[4612]: E1203 07:28:21.846422 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. No retries permitted until 2025-12-03 07:28:53.846403698 +0000 UTC m=+97.019761098 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.866104 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.866158 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.866172 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.866193 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.866209 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.968421 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.968467 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.968479 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.968495 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:21 crc kubenswrapper[4612]: I1203 07:28:21.968506 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:21Z","lastTransitionTime":"2025-12-03T07:28:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.070647 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.070679 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.070687 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.070699 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.070708 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.088393 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:22 crc kubenswrapper[4612]: E1203 07:28:22.088507 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.174035 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.174082 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.174095 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.174112 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.174123 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.277099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.277136 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.277145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.277160 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.277171 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.379925 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.380021 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.380232 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.380265 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.380283 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.473886 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/0.log" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.473967 4612 generic.go:334] "Generic (PLEG): container finished" podID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" containerID="b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72" exitCode=1 Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.474003 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerDied","Data":"b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.474415 4612 scope.go:117] "RemoveContainer" containerID="b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.484615 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.484650 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.484660 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.484678 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.484692 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.487206 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.505280 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.516591 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.528873 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.541108 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.553336 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.574456 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb3
5026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.584695 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.586326 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.586347 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.586356 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.586367 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.586376 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.596391 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.608076 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.619979 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.634517 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:21Z\\\",\\\"message\\\":\\\"2025-12-03T07:27:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64\\\\n2025-12-03T07:27:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64 to /host/opt/cni/bin/\\\\n2025-12-03T07:27:36Z [verbose] multus-daemon started\\\\n2025-12-03T07:27:36Z [verbose] Readiness Indicator file check\\\\n2025-12-03T07:28:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.644318 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.656807 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.666901 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.679344 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.688720 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.688932 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.689032 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.689119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.689180 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.695220 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:22Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.791315 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.791356 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.791364 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.791379 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.791388 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.893478 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.893529 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.893541 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.893558 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:22 crc kubenswrapper[4612]: I1203 07:28:22.893576 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:22Z","lastTransitionTime":"2025-12-03T07:28:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.000848 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.000886 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.000895 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.000910 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.000919 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.088872 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.088872 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.088931 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:23 crc kubenswrapper[4612]: E1203 07:28:23.089602 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:23 crc kubenswrapper[4612]: E1203 07:28:23.089717 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:23 crc kubenswrapper[4612]: E1203 07:28:23.089913 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.102667 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.102699 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.102710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.102723 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.102734 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.204763 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.204821 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.204844 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.204870 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.204890 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.307428 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.307458 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.307482 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.307495 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.307504 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.409557 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.410123 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.410142 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.410158 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.410169 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.478220 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/0.log" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.478261 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerStarted","Data":"1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.491220 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.504353 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.512322 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.512365 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.512377 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.512394 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.512408 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.516466 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:21Z\\\",\\\"message\\\":\\\"2025-12-03T07:27:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64\\\\n2025-12-03T07:27:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64 to /host/opt/cni/bin/\\\\n2025-12-03T07:27:36Z [verbose] multus-daemon started\\\\n2025-12-03T07:27:36Z [verbose] Readiness Indicator file check\\\\n2025-12-03T07:28:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.534092 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.545822 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.555770 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.566442 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.576693 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.596331 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.607170 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.614464 4612 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.614491 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.614499 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.614513 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.614521 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.616543 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.628598 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.639151 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.649490 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.659039 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.671587 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.685278 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:23Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.717066 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.717102 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.717113 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.717130 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.717141 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.819313 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.819352 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.819366 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.819383 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.819396 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.922033 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.922099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.922118 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.922144 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:23 crc kubenswrapper[4612]: I1203 07:28:23.922163 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:23Z","lastTransitionTime":"2025-12-03T07:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.024988 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.025027 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.025036 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.025116 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.025136 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.089318 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:28:24 crc kubenswrapper[4612]: E1203 07:28:24.089433 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.127256 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.127311 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.127320 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.127332 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.127342 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.229764 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.229816 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.229828 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.229844 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.229855 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.332811 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.332856 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.332868 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.332884 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.332895 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.434584 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.434621 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.434629 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.434646 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.434656 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.537448 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.537486 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.537496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.537510 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.537518 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.639211 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.639246 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.639254 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.639267 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.639275 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.741628 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.741708 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.741721 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.741733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.741743 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.843455 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.843483 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.843490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.843502 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.843537 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.945998 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.946038 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.946051 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.946067 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:24 crc kubenswrapper[4612]: I1203 07:28:24.946079 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:24Z","lastTransitionTime":"2025-12-03T07:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.049296 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.049334 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.049345 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.049359 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.049367 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.089000 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.089059 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:28:25 crc kubenswrapper[4612]: E1203 07:28:25.089155 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.089176 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:28:25 crc kubenswrapper[4612]: E1203 07:28:25.089267 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:28:25 crc kubenswrapper[4612]: E1203 07:28:25.089330 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.151409 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.151454 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.151465 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.151480 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.151490 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.253924 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.253989 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.254003 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.254023 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.254035 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.355937 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.355992 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.356001 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.356014 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.356023 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.458042 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.458077 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.458085 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.458099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.458108 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.560617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.560680 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.560700 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.560726 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.560743 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.662752 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.662805 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.662815 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.662831 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.662842 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.764998 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.765036 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.765045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.765059 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.765068 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.867255 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.867325 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.867337 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.867376 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.867391 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.970278 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.970320 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.970336 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.970359 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:25 crc kubenswrapper[4612]: I1203 07:28:25.970374 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:25Z","lastTransitionTime":"2025-12-03T07:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.072374 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.072409 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.072417 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.072431 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.072441 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.088730 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:28:26 crc kubenswrapper[4612]: E1203 07:28:26.088850 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.174889 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.174923 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.174941 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.174971 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.174982 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.277552 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.277621 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.277643 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.277669 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.277686 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.380521 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.380555 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.380563 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.380576 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.380589 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.483401 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.483429 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.483436 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.483453 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.483462 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.585284 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.585330 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.585339 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.585350 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.585360 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.687376 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.687413 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.687422 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.687439 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.687449 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.789517 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.789554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.789564 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.789577 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.789586 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.891502 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.891543 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.891553 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.891574 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.891584 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.993857 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.993914 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.993928 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.993964 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:26 crc kubenswrapper[4612]: I1203 07:28:26.993978 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:26Z","lastTransitionTime":"2025-12-03T07:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.088761 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.088862 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.088884 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:28:27 crc kubenswrapper[4612]: E1203 07:28:27.088906 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:28:27 crc kubenswrapper[4612]: E1203 07:28:27.089060 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:28:27 crc kubenswrapper[4612]: E1203 07:28:27.089217 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.095410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.095446 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.095458 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.095474 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.095487 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.101524 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.112385 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.126706 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.136380 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.146622 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z"
Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.164607 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.176438 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.186387 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.196131 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.198616 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.198654 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.198666 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.198680 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.198689 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.207555 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.220773 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.234428 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 
07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.246366 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.258497 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.273414 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:21Z\\\",\\\"message\\\":\\\"2025-12-03T07:27:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64\\\\n2025-12-03T07:27:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64 to /host/opt/cni/bin/\\\\n2025-12-03T07:27:36Z [verbose] multus-daemon started\\\\n2025-12-03T07:27:36Z [verbose] Readiness Indicator file check\\\\n2025-12-03T07:28:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.294693 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.300418 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.300457 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.300473 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.300490 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.300502 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.303444 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:27Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.402089 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.402115 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.402124 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.402137 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.402145 4612 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.505071 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.505117 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.505133 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.505154 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.505171 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.607764 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.607807 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.607816 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.607832 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.607842 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.710262 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.710296 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.710306 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.710317 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.710325 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.813108 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.813144 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.813152 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.813167 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.813178 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.915438 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.915491 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.915503 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.915529 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:27 crc kubenswrapper[4612]: I1203 07:28:27.915543 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:27Z","lastTransitionTime":"2025-12-03T07:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.017393 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.017427 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.017436 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.017449 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.017458 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.088679 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:28 crc kubenswrapper[4612]: E1203 07:28:28.088836 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.119524 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.119553 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.119561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.119575 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.119583 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.221495 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.221526 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.221536 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.221554 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.221567 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.324509 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.324547 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.324555 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.324570 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.324582 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.426854 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.426898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.426909 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.426927 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.426967 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.529782 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.529812 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.529822 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.529852 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.529864 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.632594 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.632645 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.632656 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.632672 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.632681 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.734676 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.734735 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.734754 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.734778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.734796 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.837059 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.837084 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.837092 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.837103 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.837111 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.939051 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.939088 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.939100 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.939116 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:28 crc kubenswrapper[4612]: I1203 07:28:28.939125 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:28Z","lastTransitionTime":"2025-12-03T07:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.041903 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.041983 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.041994 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.042014 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.042027 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.089541 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.089580 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.089546 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.089724 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.089827 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.089929 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.128017 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.128050 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.128058 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.128071 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.128080 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.140302 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:29Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.143207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.143268 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.143280 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.143294 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.143304 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.153531 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:29Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.155731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.155768 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.155777 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.155792 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.155802 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.165654 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:29Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.168486 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.168514 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.168523 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.168536 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.168546 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.179739 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:29Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.182541 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.182574 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.182582 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.182626 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.182638 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.196373 4612 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a3bb5445-5882-4bd7-a327-29d2aa687210\\\",\\\"systemUUID\\\":\\\"c9eb3301-3f55-4399-abc8-6d4892c05918\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:29Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:29 crc kubenswrapper[4612]: E1203 07:28:29.196527 4612 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.198018 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.198060 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.198074 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.198092 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.198103 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.300583 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.300625 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.300635 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.300653 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.300666 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.403499 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.403560 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.403578 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.403602 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.403620 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.505635 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.505673 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.505682 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.505696 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.505708 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.608207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.608284 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.608307 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.608346 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.608369 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.711207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.711251 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.711261 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.711277 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.711288 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.814634 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.814719 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.814743 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.814776 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.814795 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.917654 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.917721 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.917747 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.917779 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:29 crc kubenswrapper[4612]: I1203 07:28:29.917803 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:29Z","lastTransitionTime":"2025-12-03T07:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.020741 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.020783 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.020792 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.020805 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.020815 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.089185 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:30 crc kubenswrapper[4612]: E1203 07:28:30.089392 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.124324 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.124402 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.124430 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.124460 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.124483 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.228361 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.228442 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.228469 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.228503 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.228526 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.332397 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.332452 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.332470 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.332495 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.332515 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.435929 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.436005 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.436022 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.436045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.436062 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.539224 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.539315 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.539370 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.539395 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.539412 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.642334 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.642397 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.642409 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.642425 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.642436 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.745710 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.745776 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.745790 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.745817 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.745839 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.848320 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.848379 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.848393 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.848406 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.848414 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.951580 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.951626 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.951639 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.951652 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:30 crc kubenswrapper[4612]: I1203 07:28:30.951660 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:30Z","lastTransitionTime":"2025-12-03T07:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.054453 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.054561 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.054585 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.054611 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.054676 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.089438 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:31 crc kubenswrapper[4612]: E1203 07:28:31.089538 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.089685 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:31 crc kubenswrapper[4612]: E1203 07:28:31.089731 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.089913 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:31 crc kubenswrapper[4612]: E1203 07:28:31.089989 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.157449 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.157496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.157504 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.157522 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.157533 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.261430 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.261485 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.261496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.261517 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.261529 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.365439 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.365518 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.365542 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.365577 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.365600 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.467701 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.467752 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.467765 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.467784 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.467793 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.570008 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.570036 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.570044 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.570056 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.570065 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.672990 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.673026 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.673037 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.673055 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.673068 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.775504 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.775616 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.775633 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.775673 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.775684 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.878451 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.878489 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.878500 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.878516 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.878530 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.982035 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.982085 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.982100 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.982119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:31 crc kubenswrapper[4612]: I1203 07:28:31.982134 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:31Z","lastTransitionTime":"2025-12-03T07:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.085677 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.085733 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.085750 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.085775 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.085793 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.089212 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:32 crc kubenswrapper[4612]: E1203 07:28:32.089763 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.090325 4612 scope.go:117] "RemoveContainer" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.188381 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.188619 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.188627 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.188640 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.188650 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.292155 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.292201 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.292218 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.292240 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.292257 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.394863 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.394918 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.394937 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.394998 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.395016 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.498274 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.498334 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.498351 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.498377 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.498396 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.602080 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.602123 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.602171 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.602194 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.602210 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.705264 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.705292 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.705411 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.705423 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.705432 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.808068 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.808101 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.808133 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.808156 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.808169 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.910907 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.910996 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.911010 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.911025 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:32 crc kubenswrapper[4612]: I1203 07:28:32.911035 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:32Z","lastTransitionTime":"2025-12-03T07:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.013515 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.013549 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.013560 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.013575 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.013586 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.088458 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:33 crc kubenswrapper[4612]: E1203 07:28:33.088559 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.088579 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:33 crc kubenswrapper[4612]: E1203 07:28:33.088656 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.088461 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:33 crc kubenswrapper[4612]: E1203 07:28:33.088730 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.115419 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.115457 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.115468 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.115482 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.115492 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.218188 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.218219 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.218230 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.218244 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.218255 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.321613 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.321654 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.321664 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.321680 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.321689 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.424039 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.424077 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.424087 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.424101 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.424112 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.517515 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/3.log" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.518444 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/2.log" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.521801 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" exitCode=1 Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.521860 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.521897 4612 scope.go:117] "RemoveContainer" containerID="f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.523356 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:28:33 crc kubenswrapper[4612]: E1203 07:28:33.523666 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.530898 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.531118 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.531253 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.531396 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.531541 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.539888 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.554410 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.566490 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.577547 4612 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.592025 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.606612 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.616001 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.626852 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.633830 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.633863 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.633872 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.633886 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.633898 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.643146 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.654164 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.665480 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:21Z\\\",\\\"message\\\":\\\"2025-12-03T07:27:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64\\\\n2025-12-03T07:27:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64 to /host/opt/cni/bin/\\\\n2025-12-03T07:27:36Z [verbose] multus-daemon started\\\\n2025-12-03T07:27:36Z [verbose] Readiness Indicator file check\\\\n2025-12-03T07:28:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.684155 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:33Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-lg276\\\\nI1203 07:28:33.220399 6520 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 07:28:33.220415 6520 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276 in node crc\\\\nI1203 07:28:33.220419 6520 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1203 07:28:33.220427 6520 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1203 07:28:33.220433 6520 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 07:28:33.220440 6520 obj_retry.go:303] Retry object setup: *v1.Pod 
openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:28:33.220447 6520 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:28:33.220451 6520 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1203 07:28:33.220461 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.698685 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.710709 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.724509 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.736327 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.736362 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.736371 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.736386 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.736398 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.740891 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.751507 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:33Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.839202 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.839243 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.839257 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.839276 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.839290 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.942222 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.942251 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.942261 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.942278 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:33 crc kubenswrapper[4612]: I1203 07:28:33.942289 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:33Z","lastTransitionTime":"2025-12-03T07:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.045546 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.045603 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.045622 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.045647 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.045665 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.089229 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:34 crc kubenswrapper[4612]: E1203 07:28:34.089351 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.149129 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.149167 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.149174 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.149189 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.149198 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.252394 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.252458 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.252480 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.252509 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.252529 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.355995 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.356049 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.356059 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.356075 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.356086 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.458283 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.458388 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.458445 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.458472 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.458507 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.526108 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/3.log" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.561583 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.562512 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.562530 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.562559 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.562571 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.669486 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.669525 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.669534 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.669548 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.669557 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.771674 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.771714 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.771726 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.771742 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.771754 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.874404 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.874659 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.874745 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.874854 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.874963 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.979161 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.979257 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.979287 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.979323 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:34 crc kubenswrapper[4612]: I1203 07:28:34.979347 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:34Z","lastTransitionTime":"2025-12-03T07:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.082013 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.082050 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.082058 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.082072 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.082081 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.088481 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.088566 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:35 crc kubenswrapper[4612]: E1203 07:28:35.088671 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.088704 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:35 crc kubenswrapper[4612]: E1203 07:28:35.088811 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:35 crc kubenswrapper[4612]: E1203 07:28:35.088940 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.184306 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.184343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.184350 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.184364 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.184374 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.286553 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.286592 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.286602 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.286617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.286628 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.389394 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.389432 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.389440 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.389454 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.389464 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.492148 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.492200 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.492216 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.492238 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.492254 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.594150 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.594184 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.594192 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.594205 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.594214 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.696825 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.696864 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.696887 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.696902 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.696911 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.800926 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.801575 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.801592 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.801617 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.801632 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.904283 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.904341 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.904352 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.904371 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:35 crc kubenswrapper[4612]: I1203 07:28:35.904384 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:35Z","lastTransitionTime":"2025-12-03T07:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.007492 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.007986 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.008206 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.008387 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.008560 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.089124 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:36 crc kubenswrapper[4612]: E1203 07:28:36.089631 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.111661 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.111717 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.111729 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.111753 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.111763 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.214528 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.214586 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.214602 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.214626 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.214641 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.317391 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.317671 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.317762 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.317849 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.317932 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.420496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.420734 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.420825 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.420909 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.420996 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.523773 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.523832 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.523850 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.523875 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.523893 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.626267 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.626530 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.626597 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.626663 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.626728 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.730901 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.730967 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.730980 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.730999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.731012 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.834002 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.834099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.834119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.834146 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.834165 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.938236 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.938321 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.938340 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.938368 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:36 crc kubenswrapper[4612]: I1203 07:28:36.938385 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:36Z","lastTransitionTime":"2025-12-03T07:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.041103 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.041189 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.041207 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.041232 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.041250 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.088850 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.088978 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.089112 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:37 crc kubenswrapper[4612]: E1203 07:28:37.089136 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:37 crc kubenswrapper[4612]: E1203 07:28:37.089253 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:37 crc kubenswrapper[4612]: E1203 07:28:37.090001 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.109690 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5422811f-15b4-4f15-ba7c-08620834d727\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b14431f707f446113fc666ec20580efa40e4527ea12f5bd9f655b26f1dcb7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee06d2486ad57dcd6368958613d466c55cab0274cee251e869c77e7ca78eb9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cb8d770c695ed6b657d883fc2ab7886409322c493091b02986fe4531dfad3a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b61ea23a35c79fcde906bd1219f77e3bcca629c20085b3f31ffd9712080939c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.125096 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.138679 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1845c364d4db1cae9b0f621ba3ae018d2c71e6d0a18b6d15cd55424bdeccdfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8487c2266a7473fa2bb42ab37125eef2d3c682732ec4ae29c9db2bd112308f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.143373 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.143423 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.143441 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.143464 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.143483 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.152849 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b21e1e7-43e1-4a25-a0ba-a7b6948a6108\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38aa871aa420fa972729295c2fde82a7ebf75fe897f0e5acbe326ab7d0a2d2c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fd81160400c637a57f4143d8b2147b95a4701a6114d4c0fc37a9078699645ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85a98382cd1fb7658826e2afa7bcb0e019b50b13b5b96f69623259e1ab384a93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.176268 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630b35385e147fc6810c5042198373041653e7c54e722ef3eb8d25896c6920e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.196805 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p52kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:21Z\\\",\\\"message\\\":\\\"2025-12-03T07:27:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64\\\\n2025-12-03T07:27:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_bcd94373-94a0-4a19-88d3-1a5ccd4f6f64 to /host/opt/cni/bin/\\\\n2025-12-03T07:27:36Z [verbose] multus-daemon started\\\\n2025-12-03T07:27:36Z [verbose] Readiness Indicator file check\\\\n2025-12-03T07:28:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4prvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p52kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.218004 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64b21a08-7c39-4c31-a34d-88e74edf88c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f44752bcc8f9d84815c03d58be38facd03acfeb35026adb54491ded921c8dd8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:04Z\\\",\\\"message\\\":\\\"sful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nF1203 07:28:03.909758 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:03Z is after 2025-08-24T17:21:41Z]\\\\nI1203 07:28:03.909770 6172 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nI1203 07:28:03.909772 6172 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 07:28:03.909778 6172 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T07:28:33Z\\\",\\\"message\\\":\\\"ubernetes/ovnkube-control-plane-749d76644c-lg276\\\\nI1203 07:28:33.220399 6520 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 07:28:33.220415 6520 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276 in node crc\\\\nI1203 07:28:33.220419 6520 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1203 07:28:33.220427 6520 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1203 07:28:33.220433 6520 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 07:28:33.220440 6520 obj_retry.go:303] Retry object setup: *v1.Pod 
openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:28:33.220447 6520 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1203 07:28:33.220451 6520 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1203 07:28:33.220461 6520 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ksmrn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9krtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.230570 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b2zhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02de6f0d-8266-45f0-87f3-8bc2da2e61d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6767ac1160e00d9b1b9abbc9d75d567416b3c2e305b1b9ad668ace4689f2de92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7zmcg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b2zhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.245025 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"020a82cd-f275-41b6-af72-0de61a31fc0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca34b77cac58da655034029b11ea11425d0b23c568836aa75aa8221c1ed2b550\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69a245981fe7139d4cd59440318e24da2ae75f73dfed513d05178edd66989c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dkflz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lg276\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 
07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.246209 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.246240 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.246248 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.246261 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.246270 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.257458 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.269819 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.284281 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7xg44" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b8cd268-ef6d-4c13-a726-f7e6a9beec58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be4d03c1563acb427af61652541530d465f02853a445887c7f7f625f3a202f2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdc4e5f2c9e1bd24329f09cfef0fd3b3895caf2544710a5c34cb016ee5aae16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be1ecd8ff9a611e21ba1705c51ea933207ea0b80f2bc9b5e9c183aab496d9154\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://792c73bbc0d335c153be60623dcfc15ebba9c85070dbe02958fbd715c8f1ea22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91ea7e30babe2a9b7fcb6ed9eddce92a70b88141f64e33ef43437ee9545376c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d376dc608c98e1ca275096dc076ec170a27a7681799100977e2c8e9c396ea45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b75bf39b0eaf8e821f273785df9242491f24cbe0116b52b86dba17961157f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nx5w9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7xg44\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.293587 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5g4hj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1007d628-c3fc-4ecf-a0af-86c406fd2290\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9d07029bc0fff4d32507704bb2a6f799be7319a9732ab81b6c02e77e14050fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lvzld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5g4hj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.302564 4612 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-8m4gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl4t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8m4gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.314936 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bb6f136-2fb9-4002-ad0d-206b8e43c6ea\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T07:27:34Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 07:27:34.177488 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 07:27:34.177789 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 07:27:34.179783 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2726254371/tls.crt::/tmp/serving-cert-2726254371/tls.key\\\\\\\"\\\\nI1203 07:27:34.565026 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 07:27:34.581963 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 07:27:34.582029 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 07:27:34.582069 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 07:27:34.582076 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 07:27:34.599281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 07:27:34.599312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599318 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 07:27:34.599323 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 07:27:34.599326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1203 07:27:34.599330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 07:27:34.599350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 07:27:34.599356 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 07:27:34.603753 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T07:27:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T07:27:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:17Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.325381 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f5ef465fd9f603a587da93a0d6a7e95a15e160c8fd47f89c81adf70ece802a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.335506 4612 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bf87dca8-ceaa-424a-8074-7a63c648b84b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T07:27:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4297d96a2eb8d1533b09d6606018da8575f0686ec9223117b48a9a8ea00d7e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T07:27:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w26dm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T07:27:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d8td2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T07:28:37Z is after 2025-08-24T17:21:41Z" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.347785 4612 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.347821 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.347858 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.347919 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.347932 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.450936 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.451010 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.451026 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.451049 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.451065 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.553201 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.553552 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.553737 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.553884 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.554086 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.657227 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.658309 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.658343 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.658373 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.658474 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.762415 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.762476 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.762485 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.762500 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.762509 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.865475 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.865541 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.865562 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.865590 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.865611 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.968237 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.968336 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.968349 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.968366 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:37 crc kubenswrapper[4612]: I1203 07:28:37.968377 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:37Z","lastTransitionTime":"2025-12-03T07:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.071496 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.071536 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.071547 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.071565 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.071578 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.089152 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.089374 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.100001 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.174787 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.174824 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.174836 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.174856 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.174868 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.277989 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.278032 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.278045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.278063 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.278075 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.380582 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.380861 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.380978 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.381091 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.381183 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.484484 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.484511 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.484519 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.484531 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.484539 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.587318 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.587342 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.587350 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.587362 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.587371 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.690038 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.690099 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.690119 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.690145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.690162 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.792887 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.792926 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.792961 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.792982 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.792996 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.895145 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.895182 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.895194 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.895212 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.895232 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.944935 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945188 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:42.945160023 +0000 UTC m=+146.118517423 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.945366 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.945530 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.945562 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945579 4612 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945665 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:29:42.945640505 +0000 UTC m=+146.118997915 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945668 4612 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945709 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 07:29:42.945698457 +0000 UTC m=+146.119055867 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945750 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945767 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945780 4612 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:38 crc kubenswrapper[4612]: E1203 07:28:38.945818 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 07:29:42.94581029 +0000 UTC m=+146.119167810 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.998410 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.998821 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.999000 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.999134 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:38 crc kubenswrapper[4612]: I1203 07:28:38.999227 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:38Z","lastTransitionTime":"2025-12-03T07:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.046376 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.046545 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.046561 4612 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.046571 4612 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.046631 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 07:29:43.046612549 +0000 UTC m=+146.219969949 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.088762 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.088878 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.088769 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.088766 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.088954 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:39 crc kubenswrapper[4612]: E1203 07:28:39.089133 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.101198 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.101273 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.101286 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.101304 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.101314 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.203446 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.203754 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.204045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.204247 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.204618 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.307136 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.307208 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.307227 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.307257 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.307283 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.411237 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.411718 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.411745 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.411778 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.411800 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.474731 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.475220 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.475689 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.475971 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.476113 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.692020 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.692045 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.692056 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.692068 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.692079 4612 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T07:28:39Z","lastTransitionTime":"2025-12-03T07:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.729238 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68"] Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.729666 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.735211 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.735479 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.735689 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.735871 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.803792 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-7xg44" podStartSLOduration=64.803770916 podStartE2EDuration="1m4.803770916s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.789371438 +0000 UTC m=+82.962728848" watchObservedRunningTime="2025-12-03 07:28:39.803770916 +0000 UTC m=+82.977128316" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.818654 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-5g4hj" podStartSLOduration=65.818631107 podStartE2EDuration="1m5.818631107s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.804202388 +0000 UTC m=+82.977559788" watchObservedRunningTime="2025-12-03 07:28:39.818631107 +0000 UTC m=+82.991988507" Dec 03 07:28:39 crc 
kubenswrapper[4612]: I1203 07:28:39.846886 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=64.846866879 podStartE2EDuration="1m4.846866879s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.836777564 +0000 UTC m=+83.010134964" watchObservedRunningTime="2025-12-03 07:28:39.846866879 +0000 UTC m=+83.020224279" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.854736 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.854944 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74457474-4cf4-4bda-9f36-a25e02bbb21b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.855069 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.855172 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74457474-4cf4-4bda-9f36-a25e02bbb21b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.855282 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74457474-4cf4-4bda-9f36-a25e02bbb21b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.867818 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.8677989990000001 podStartE2EDuration="1.867798999s" podCreationTimestamp="2025-12-03 07:28:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.846859509 +0000 UTC m=+83.020216909" watchObservedRunningTime="2025-12-03 07:28:39.867798999 +0000 UTC m=+83.041156409" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.881506 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podStartSLOduration=65.881476428 podStartE2EDuration="1m5.881476428s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.880361119 +0000 UTC m=+83.053718519" watchObservedRunningTime="2025-12-03 07:28:39.881476428 +0000 UTC m=+83.054833828" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.908858 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=30.908839287 podStartE2EDuration="30.908839287s" podCreationTimestamp="2025-12-03 07:28:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.89333669 +0000 UTC m=+83.066694090" watchObservedRunningTime="2025-12-03 07:28:39.908839287 +0000 UTC m=+83.082196687" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.939616 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=58.939582125 podStartE2EDuration="58.939582125s" podCreationTimestamp="2025-12-03 07:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.939572965 +0000 UTC m=+83.112930375" watchObservedRunningTime="2025-12-03 07:28:39.939582125 +0000 UTC m=+83.112939525" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.956728 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.956989 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74457474-4cf4-4bda-9f36-a25e02bbb21b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.957126 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.956847 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.957324 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/74457474-4cf4-4bda-9f36-a25e02bbb21b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.957223 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74457474-4cf4-4bda-9f36-a25e02bbb21b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.957503 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74457474-4cf4-4bda-9f36-a25e02bbb21b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.957966 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74457474-4cf4-4bda-9f36-a25e02bbb21b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.970472 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74457474-4cf4-4bda-9f36-a25e02bbb21b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:39 crc kubenswrapper[4612]: I1203 07:28:39.974561 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74457474-4cf4-4bda-9f36-a25e02bbb21b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rrl68\" (UID: \"74457474-4cf4-4bda-9f36-a25e02bbb21b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.007351 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-p52kb" podStartSLOduration=65.007328786 podStartE2EDuration="1m5.007328786s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:39.978711713 +0000 UTC m=+83.152069113" watchObservedRunningTime="2025-12-03 07:28:40.007328786 +0000 UTC m=+83.180686196" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.045100 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.066359 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-b2zhj" podStartSLOduration=66.066320036 podStartE2EDuration="1m6.066320036s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:40.035212708 +0000 UTC m=+83.208570108" watchObservedRunningTime="2025-12-03 07:28:40.066320036 +0000 UTC m=+83.239677456" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.066670 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lg276" podStartSLOduration=65.066663555 podStartE2EDuration="1m5.066663555s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:40.065595947 +0000 UTC m=+83.238953347" watchObservedRunningTime="2025-12-03 07:28:40.066663555 +0000 UTC m=+83.240020955" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.096253 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:40 crc kubenswrapper[4612]: E1203 07:28:40.096430 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.550609 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" event={"ID":"74457474-4cf4-4bda-9f36-a25e02bbb21b","Type":"ContainerStarted","Data":"a9ce2ac315ddca6533681638752c1b192260918fea5056bff7e6b6395308c664"} Dec 03 07:28:40 crc kubenswrapper[4612]: I1203 07:28:40.550994 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" event={"ID":"74457474-4cf4-4bda-9f36-a25e02bbb21b","Type":"ContainerStarted","Data":"d72fd8070c2012fb7ef99e4654bb426a994b0b22b2e020eba01f35040e0a4ab1"} Dec 03 07:28:41 crc kubenswrapper[4612]: I1203 07:28:41.088580 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:41 crc kubenswrapper[4612]: I1203 07:28:41.088675 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:41 crc kubenswrapper[4612]: E1203 07:28:41.090395 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:41 crc kubenswrapper[4612]: I1203 07:28:41.088604 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:41 crc kubenswrapper[4612]: E1203 07:28:41.092222 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:41 crc kubenswrapper[4612]: E1203 07:28:41.092463 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:42 crc kubenswrapper[4612]: I1203 07:28:42.088487 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:42 crc kubenswrapper[4612]: E1203 07:28:42.088645 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:43 crc kubenswrapper[4612]: I1203 07:28:43.088830 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:43 crc kubenswrapper[4612]: I1203 07:28:43.088910 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:43 crc kubenswrapper[4612]: I1203 07:28:43.088961 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:43 crc kubenswrapper[4612]: E1203 07:28:43.089003 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:43 crc kubenswrapper[4612]: E1203 07:28:43.089076 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:43 crc kubenswrapper[4612]: E1203 07:28:43.089196 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:44 crc kubenswrapper[4612]: I1203 07:28:44.089359 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:44 crc kubenswrapper[4612]: E1203 07:28:44.090068 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:45 crc kubenswrapper[4612]: I1203 07:28:45.089191 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:45 crc kubenswrapper[4612]: E1203 07:28:45.089309 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:45 crc kubenswrapper[4612]: I1203 07:28:45.089191 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:45 crc kubenswrapper[4612]: E1203 07:28:45.089454 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:45 crc kubenswrapper[4612]: I1203 07:28:45.090353 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:45 crc kubenswrapper[4612]: E1203 07:28:45.090648 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:46 crc kubenswrapper[4612]: I1203 07:28:46.088794 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:46 crc kubenswrapper[4612]: E1203 07:28:46.089200 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:47 crc kubenswrapper[4612]: I1203 07:28:47.088935 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:47 crc kubenswrapper[4612]: I1203 07:28:47.088939 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:47 crc kubenswrapper[4612]: I1203 07:28:47.089049 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:47 crc kubenswrapper[4612]: E1203 07:28:47.090713 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:47 crc kubenswrapper[4612]: E1203 07:28:47.091285 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:47 crc kubenswrapper[4612]: I1203 07:28:47.091587 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:28:47 crc kubenswrapper[4612]: E1203 07:28:47.091734 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:47 crc kubenswrapper[4612]: E1203 07:28:47.091976 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:47 crc kubenswrapper[4612]: I1203 07:28:47.124463 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rrl68" podStartSLOduration=73.124444545 podStartE2EDuration="1m13.124444545s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:28:40.566645654 +0000 UTC m=+83.740003064" watchObservedRunningTime="2025-12-03 07:28:47.124444545 +0000 UTC m=+90.297801965" Dec 03 07:28:48 crc kubenswrapper[4612]: I1203 07:28:48.089096 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:48 crc kubenswrapper[4612]: E1203 07:28:48.089237 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:49 crc kubenswrapper[4612]: I1203 07:28:49.089433 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:49 crc kubenswrapper[4612]: I1203 07:28:49.089488 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:49 crc kubenswrapper[4612]: I1203 07:28:49.089564 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:49 crc kubenswrapper[4612]: E1203 07:28:49.089632 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:49 crc kubenswrapper[4612]: E1203 07:28:49.090086 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:49 crc kubenswrapper[4612]: E1203 07:28:49.090201 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:50 crc kubenswrapper[4612]: I1203 07:28:50.089433 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:50 crc kubenswrapper[4612]: E1203 07:28:50.089633 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:51 crc kubenswrapper[4612]: I1203 07:28:51.088850 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:51 crc kubenswrapper[4612]: I1203 07:28:51.088929 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:51 crc kubenswrapper[4612]: I1203 07:28:51.088824 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:51 crc kubenswrapper[4612]: E1203 07:28:51.089118 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:51 crc kubenswrapper[4612]: E1203 07:28:51.089250 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:51 crc kubenswrapper[4612]: E1203 07:28:51.089403 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:51 crc kubenswrapper[4612]: I1203 07:28:51.599875 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:28:51 crc kubenswrapper[4612]: I1203 07:28:51.600859 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:28:51 crc kubenswrapper[4612]: E1203 07:28:51.601199 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:28:52 crc kubenswrapper[4612]: I1203 07:28:52.089012 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:52 crc kubenswrapper[4612]: E1203 07:28:52.089132 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:53 crc kubenswrapper[4612]: I1203 07:28:53.089977 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:53 crc kubenswrapper[4612]: E1203 07:28:53.090119 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:53 crc kubenswrapper[4612]: I1203 07:28:53.090439 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:53 crc kubenswrapper[4612]: I1203 07:28:53.090461 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:53 crc kubenswrapper[4612]: E1203 07:28:53.090515 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:53 crc kubenswrapper[4612]: E1203 07:28:53.090606 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:53 crc kubenswrapper[4612]: I1203 07:28:53.916868 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:53 crc kubenswrapper[4612]: E1203 07:28:53.917117 4612 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:53 crc kubenswrapper[4612]: E1203 07:28:53.917239 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs podName:fca5f46d-010f-4d46-8926-fd2a2cb9ee1e nodeName:}" failed. 
No retries permitted until 2025-12-03 07:29:57.917207279 +0000 UTC m=+161.090564709 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs") pod "network-metrics-daemon-8m4gl" (UID: "fca5f46d-010f-4d46-8926-fd2a2cb9ee1e") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 07:28:54 crc kubenswrapper[4612]: I1203 07:28:54.089094 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:54 crc kubenswrapper[4612]: E1203 07:28:54.089249 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:55 crc kubenswrapper[4612]: I1203 07:28:55.089438 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:55 crc kubenswrapper[4612]: I1203 07:28:55.089572 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:55 crc kubenswrapper[4612]: E1203 07:28:55.089685 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:55 crc kubenswrapper[4612]: I1203 07:28:55.089754 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:55 crc kubenswrapper[4612]: E1203 07:28:55.089771 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:55 crc kubenswrapper[4612]: E1203 07:28:55.089898 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:56 crc kubenswrapper[4612]: I1203 07:28:56.089166 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:56 crc kubenswrapper[4612]: E1203 07:28:56.089341 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:57 crc kubenswrapper[4612]: I1203 07:28:57.088723 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:57 crc kubenswrapper[4612]: I1203 07:28:57.088795 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:57 crc kubenswrapper[4612]: E1203 07:28:57.089653 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:57 crc kubenswrapper[4612]: E1203 07:28:57.089749 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:28:57 crc kubenswrapper[4612]: I1203 07:28:57.089670 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:57 crc kubenswrapper[4612]: E1203 07:28:57.090296 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:58 crc kubenswrapper[4612]: I1203 07:28:58.088571 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:28:58 crc kubenswrapper[4612]: E1203 07:28:58.089163 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:28:59 crc kubenswrapper[4612]: I1203 07:28:59.089126 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:28:59 crc kubenswrapper[4612]: I1203 07:28:59.089171 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:28:59 crc kubenswrapper[4612]: I1203 07:28:59.089223 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:28:59 crc kubenswrapper[4612]: E1203 07:28:59.089379 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:28:59 crc kubenswrapper[4612]: E1203 07:28:59.090483 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:28:59 crc kubenswrapper[4612]: E1203 07:28:59.090867 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:00 crc kubenswrapper[4612]: I1203 07:29:00.088570 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:00 crc kubenswrapper[4612]: E1203 07:29:00.089088 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:01 crc kubenswrapper[4612]: I1203 07:29:01.089142 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:01 crc kubenswrapper[4612]: I1203 07:29:01.089207 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:01 crc kubenswrapper[4612]: I1203 07:29:01.089305 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:01 crc kubenswrapper[4612]: E1203 07:29:01.089310 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:01 crc kubenswrapper[4612]: E1203 07:29:01.089563 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:01 crc kubenswrapper[4612]: E1203 07:29:01.089596 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:02 crc kubenswrapper[4612]: I1203 07:29:02.089611 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:02 crc kubenswrapper[4612]: I1203 07:29:02.090773 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:29:02 crc kubenswrapper[4612]: E1203 07:29:02.090843 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:02 crc kubenswrapper[4612]: E1203 07:29:02.091062 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:29:02 crc kubenswrapper[4612]: I1203 07:29:02.108694 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 07:29:03 crc kubenswrapper[4612]: I1203 07:29:03.088424 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:03 crc kubenswrapper[4612]: E1203 07:29:03.088761 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:03 crc kubenswrapper[4612]: I1203 07:29:03.088772 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:03 crc kubenswrapper[4612]: I1203 07:29:03.088876 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:03 crc kubenswrapper[4612]: E1203 07:29:03.089068 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:03 crc kubenswrapper[4612]: E1203 07:29:03.089333 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:04 crc kubenswrapper[4612]: I1203 07:29:04.089154 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:04 crc kubenswrapper[4612]: E1203 07:29:04.089406 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:05 crc kubenswrapper[4612]: I1203 07:29:05.089116 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:05 crc kubenswrapper[4612]: I1203 07:29:05.089566 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:05 crc kubenswrapper[4612]: E1203 07:29:05.090213 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:05 crc kubenswrapper[4612]: I1203 07:29:05.090069 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:05 crc kubenswrapper[4612]: E1203 07:29:05.090314 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:05 crc kubenswrapper[4612]: E1203 07:29:05.089741 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:06 crc kubenswrapper[4612]: I1203 07:29:06.089246 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:06 crc kubenswrapper[4612]: E1203 07:29:06.089486 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:07 crc kubenswrapper[4612]: I1203 07:29:07.088487 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:07 crc kubenswrapper[4612]: I1203 07:29:07.088499 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:07 crc kubenswrapper[4612]: E1203 07:29:07.090575 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:07 crc kubenswrapper[4612]: I1203 07:29:07.090978 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:07 crc kubenswrapper[4612]: E1203 07:29:07.091115 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:07 crc kubenswrapper[4612]: E1203 07:29:07.091430 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:07 crc kubenswrapper[4612]: I1203 07:29:07.129895 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=5.129878523 podStartE2EDuration="5.129878523s" podCreationTimestamp="2025-12-03 07:29:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:07.12826349 +0000 UTC m=+110.301620890" watchObservedRunningTime="2025-12-03 07:29:07.129878523 +0000 UTC m=+110.303235923" Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.088865 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:08 crc kubenswrapper[4612]: E1203 07:29:08.089266 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.643882 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/1.log" Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.645520 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/0.log" Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.645571 4612 generic.go:334] "Generic (PLEG): container finished" podID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" containerID="1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c" exitCode=1 Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.645617 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerDied","Data":"1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c"} Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.645673 4612 scope.go:117] "RemoveContainer" containerID="b55da59b6f7aa0eed6345a8090db60fb0fde6bbbe6c01ce926493bc6e92a2a72" Dec 03 07:29:08 crc kubenswrapper[4612]: I1203 07:29:08.646293 4612 scope.go:117] "RemoveContainer" containerID="1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c" Dec 03 07:29:08 crc kubenswrapper[4612]: E1203 07:29:08.646563 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-p52kb_openshift-multus(b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d)\"" pod="openshift-multus/multus-p52kb" 
podUID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" Dec 03 07:29:09 crc kubenswrapper[4612]: I1203 07:29:09.089060 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:09 crc kubenswrapper[4612]: I1203 07:29:09.089103 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:09 crc kubenswrapper[4612]: I1203 07:29:09.089070 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:09 crc kubenswrapper[4612]: E1203 07:29:09.089253 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:09 crc kubenswrapper[4612]: E1203 07:29:09.089334 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:09 crc kubenswrapper[4612]: E1203 07:29:09.089408 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:09 crc kubenswrapper[4612]: I1203 07:29:09.658071 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/1.log" Dec 03 07:29:10 crc kubenswrapper[4612]: I1203 07:29:10.089080 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:10 crc kubenswrapper[4612]: E1203 07:29:10.089292 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:11 crc kubenswrapper[4612]: I1203 07:29:11.089235 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:11 crc kubenswrapper[4612]: E1203 07:29:11.089380 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:11 crc kubenswrapper[4612]: I1203 07:29:11.089407 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:11 crc kubenswrapper[4612]: E1203 07:29:11.089481 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:11 crc kubenswrapper[4612]: I1203 07:29:11.089235 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:11 crc kubenswrapper[4612]: E1203 07:29:11.089557 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:12 crc kubenswrapper[4612]: I1203 07:29:12.088507 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:12 crc kubenswrapper[4612]: E1203 07:29:12.088723 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:13 crc kubenswrapper[4612]: I1203 07:29:13.089629 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:13 crc kubenswrapper[4612]: I1203 07:29:13.089743 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:13 crc kubenswrapper[4612]: E1203 07:29:13.089828 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:13 crc kubenswrapper[4612]: E1203 07:29:13.089870 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:13 crc kubenswrapper[4612]: I1203 07:29:13.090053 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:13 crc kubenswrapper[4612]: E1203 07:29:13.090342 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:13 crc kubenswrapper[4612]: I1203 07:29:13.090744 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:29:13 crc kubenswrapper[4612]: E1203 07:29:13.090936 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9krtb_openshift-ovn-kubernetes(64b21a08-7c39-4c31-a34d-88e74edf88c6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" Dec 03 07:29:14 crc kubenswrapper[4612]: I1203 07:29:14.089063 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:14 crc kubenswrapper[4612]: E1203 07:29:14.089267 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:15 crc kubenswrapper[4612]: I1203 07:29:15.089391 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:15 crc kubenswrapper[4612]: E1203 07:29:15.089542 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:15 crc kubenswrapper[4612]: I1203 07:29:15.089756 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:15 crc kubenswrapper[4612]: E1203 07:29:15.089819 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:15 crc kubenswrapper[4612]: I1203 07:29:15.090005 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:15 crc kubenswrapper[4612]: E1203 07:29:15.090071 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:16 crc kubenswrapper[4612]: I1203 07:29:16.089113 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:16 crc kubenswrapper[4612]: E1203 07:29:16.089236 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:17 crc kubenswrapper[4612]: I1203 07:29:17.088900 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:17 crc kubenswrapper[4612]: I1203 07:29:17.089001 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:17 crc kubenswrapper[4612]: I1203 07:29:17.089058 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:17 crc kubenswrapper[4612]: E1203 07:29:17.090810 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:17 crc kubenswrapper[4612]: E1203 07:29:17.090923 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:17 crc kubenswrapper[4612]: E1203 07:29:17.091132 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:17 crc kubenswrapper[4612]: E1203 07:29:17.101006 4612 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 07:29:17 crc kubenswrapper[4612]: E1203 07:29:17.175427 4612 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 07:29:18 crc kubenswrapper[4612]: I1203 07:29:18.089538 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:18 crc kubenswrapper[4612]: E1203 07:29:18.089747 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:19 crc kubenswrapper[4612]: I1203 07:29:19.089588 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:19 crc kubenswrapper[4612]: I1203 07:29:19.089686 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:19 crc kubenswrapper[4612]: E1203 07:29:19.089814 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:19 crc kubenswrapper[4612]: I1203 07:29:19.089897 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:19 crc kubenswrapper[4612]: E1203 07:29:19.090166 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:19 crc kubenswrapper[4612]: E1203 07:29:19.090200 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:20 crc kubenswrapper[4612]: I1203 07:29:20.089232 4612 util.go:30] "No sandbox for pod can be found. 
Dec 03 07:29:20 crc kubenswrapper[4612]: E1203 07:29:20.089607 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:29:21 crc kubenswrapper[4612]: I1203 07:29:21.088397 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:21 crc kubenswrapper[4612]: E1203 07:29:21.088528 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:29:21 crc kubenswrapper[4612]: I1203 07:29:21.088574 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:29:21 crc kubenswrapper[4612]: I1203 07:29:21.088693 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:21 crc kubenswrapper[4612]: E1203 07:29:21.088774 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:29:21 crc kubenswrapper[4612]: E1203 07:29:21.088912 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:29:22 crc kubenswrapper[4612]: I1203 07:29:22.089022 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:29:22 crc kubenswrapper[4612]: E1203 07:29:22.089284 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:22 crc kubenswrapper[4612]: I1203 07:29:22.089829 4612 scope.go:117] "RemoveContainer" containerID="1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c" Dec 03 07:29:22 crc kubenswrapper[4612]: E1203 07:29:22.176627 4612 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 07:29:22 crc kubenswrapper[4612]: I1203 07:29:22.700826 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/1.log" Dec 03 07:29:22 crc kubenswrapper[4612]: I1203 07:29:22.700895 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerStarted","Data":"aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51"} Dec 03 07:29:23 crc kubenswrapper[4612]: I1203 07:29:23.088406 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:23 crc kubenswrapper[4612]: E1203 07:29:23.088772 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:23 crc kubenswrapper[4612]: I1203 07:29:23.088506 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:23 crc kubenswrapper[4612]: E1203 07:29:23.089154 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:23 crc kubenswrapper[4612]: I1203 07:29:23.088491 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:23 crc kubenswrapper[4612]: E1203 07:29:23.090309 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:24 crc kubenswrapper[4612]: I1203 07:29:24.089531 4612 util.go:30] "No sandbox for pod can be found. 
Dec 03 07:29:24 crc kubenswrapper[4612]: E1203 07:29:24.089721 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.089286 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:25 crc kubenswrapper[4612]: E1203 07:29:25.089448 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.089645 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:29:25 crc kubenswrapper[4612]: E1203 07:29:25.089709 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.089836 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:25 crc kubenswrapper[4612]: E1203 07:29:25.089892 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.090711 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.712910 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/3.log" Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.716232 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerStarted","Data":"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476"} Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.716692 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.856758 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podStartSLOduration=110.85673246 podStartE2EDuration="1m50.85673246s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:25.746439732 +0000 UTC m=+128.919797162" watchObservedRunningTime="2025-12-03 07:29:25.85673246 +0000 UTC m=+129.030089900" Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.858022 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8m4gl"] Dec 03 07:29:25 crc kubenswrapper[4612]: I1203 07:29:25.858136 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:25 crc kubenswrapper[4612]: E1203 07:29:25.858282 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:27 crc kubenswrapper[4612]: I1203 07:29:27.089274 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:27 crc kubenswrapper[4612]: E1203 07:29:27.091681 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:27 crc kubenswrapper[4612]: I1203 07:29:27.091716 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:27 crc kubenswrapper[4612]: I1203 07:29:27.091766 4612 util.go:30] "No sandbox for pod can be found. 
Dec 03 07:29:27 crc kubenswrapper[4612]: E1203 07:29:27.091898 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 07:29:27 crc kubenswrapper[4612]: E1203 07:29:27.092084 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:29:27 crc kubenswrapper[4612]: E1203 07:29:27.177755 4612 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 03 07:29:28 crc kubenswrapper[4612]: I1203 07:29:28.089327 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:29:28 crc kubenswrapper[4612]: E1203 07:29:28.089559 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:29:29 crc kubenswrapper[4612]: I1203 07:29:29.089179 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:29 crc kubenswrapper[4612]: I1203 07:29:29.089179 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:29 crc kubenswrapper[4612]: E1203 07:29:29.089299 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 07:29:29 crc kubenswrapper[4612]: I1203 07:29:29.089326 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:29:29 crc kubenswrapper[4612]: E1203 07:29:29.089413 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:29 crc kubenswrapper[4612]: E1203 07:29:29.089501 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:30 crc kubenswrapper[4612]: I1203 07:29:30.089077 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:30 crc kubenswrapper[4612]: E1203 07:29:30.089208 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e" Dec 03 07:29:31 crc kubenswrapper[4612]: I1203 07:29:31.089480 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:31 crc kubenswrapper[4612]: I1203 07:29:31.089503 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:31 crc kubenswrapper[4612]: I1203 07:29:31.089666 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:31 crc kubenswrapper[4612]: E1203 07:29:31.089661 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 07:29:31 crc kubenswrapper[4612]: E1203 07:29:31.089921 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 07:29:31 crc kubenswrapper[4612]: E1203 07:29:31.090048 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 07:29:32 crc kubenswrapper[4612]: I1203 07:29:32.089077 4612 util.go:30] "No sandbox for pod can be found. 
Dec 03 07:29:32 crc kubenswrapper[4612]: E1203 07:29:32.089235 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8m4gl" podUID="fca5f46d-010f-4d46-8926-fd2a2cb9ee1e"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.089055 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.089073 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.089461 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.092004 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.092653 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.093624 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 03 07:29:33 crc kubenswrapper[4612]: I1203 07:29:33.093891 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 03 07:29:34 crc kubenswrapper[4612]: I1203 07:29:34.088753 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:29:34 crc kubenswrapper[4612]: I1203 07:29:34.092473 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 03 07:29:34 crc kubenswrapper[4612]: I1203 07:29:34.093122 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.054999 4612 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.107755 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9vwq6"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.108884 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.114578 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bl9ht"]
Dec 03 07:29:41 crc kubenswrapper[4612]: W1203 07:29:41.123374 4612 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 03 07:29:41 crc kubenswrapper[4612]: E1203 07:29:41.123461 4612 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.123652 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 03 07:29:41 crc kubenswrapper[4612]: W1203 07:29:41.126538 4612 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 03 07:29:41 crc kubenswrapper[4612]: E1203 07:29:41.126714 4612 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.126973 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.127114 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: W1203 07:29:41.127017 4612 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 03 07:29:41 crc kubenswrapper[4612]: E1203 07:29:41.127345 4612 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.138129 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.139187 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.139623 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.140108 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.140620 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.143030 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9rnzg"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.143666 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tznzj"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.143974 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.144470 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.144710 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.144890 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.145003 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.145212 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.146701 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.147098 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.147678 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.151310 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.156130 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.156358 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.156826 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2z4s"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.157278 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.157625 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.157972 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b2z4s"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.161935 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-pdlw9"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.167560 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.168134 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.168546 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.172015 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-k6tc4"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.172489 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k6tc4"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.178162 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-477tn"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.178685 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-477tn"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.184052 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4sxt"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.184492 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.184815 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.185093 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.189378 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.190191 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.194499 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.195072 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.211334 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.211732 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.211904 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.213307 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.213469 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.213623 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.220702 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xrwls"]
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.221797 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.227137 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.227547 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.227568 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.237367 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.237883 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.238120 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.238164 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.238165 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.238374 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.239302 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.241167 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.242365 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.243792 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.248173 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.243900 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.244168 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.248000 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249057 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249237 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
*v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249364 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249375 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249491 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249063 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249722 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.249894 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.248094 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.270168 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.270370 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.270510 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.270998 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.271259 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.271428 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.272027 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.272259 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273172 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273293 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273405 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273513 4612 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273623 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.273731 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274033 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274171 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274196 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274293 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274305 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274395 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274412 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274500 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274537 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274666 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274775 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274886 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274938 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.274777 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275047 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275108 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275122 4612 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275181 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275245 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275312 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275349 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275378 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275316 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275452 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275559 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.275635 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276184 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-mng7v"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-serving-cert\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276327 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb685d41-c42f-4f5f-9639-86691091c485-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276354 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-node-pullsecrets\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276378 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2452bbf0-39ec-4e69-bf8d-62062d801e43-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: 
\"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276407 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276429 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276452 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tlqp\" (UniqueName: \"kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276473 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktpxr\" (UniqueName: \"kubernetes.io/projected/4d345e4d-bef1-4c45-89bd-f30b45165dd2-kube-api-access-ktpxr\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276496 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276510 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276518 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-audit\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276539 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plppn\" (UniqueName: \"kubernetes.io/projected/2e0be5a5-d836-400e-824a-c7aff9179549-kube-api-access-plppn\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276561 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcf07602-ffde-48e4-b15e-ff3a08779244-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276587 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s945b\" (UniqueName: \"kubernetes.io/projected/2f5e688a-098e-44eb-be54-6a05aa962aeb-kube-api-access-s945b\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276611 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276631 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276653 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeb0f310-3564-404e-aeb5-237cc9267fed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276678 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95bx9\" (UniqueName: \"kubernetes.io/projected/cb685d41-c42f-4f5f-9639-86691091c485-kube-api-access-95bx9\") pod 
\"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276701 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276725 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691c61b7-de0d-49cb-b8c2-9148cc2b8167-serving-cert\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276735 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276748 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-config\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276770 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-auth-proxy-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276792 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/44e16d27-e50e-4140-a860-b876365c09ca-audit-dir\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276814 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm9hg\" (UniqueName: \"kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276874 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc26f\" (UniqueName: \"kubernetes.io/projected/eeb0f310-3564-404e-aeb5-237cc9267fed-kube-api-access-tc26f\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276901 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-trusted-ca\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.276925 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.278567 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.282689 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283384 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283538 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/80eef4fa-3155-4682-8073-82f26a7eb519-trusted-ca\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283737 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-encryption-config\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283769 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb685d41-c42f-4f5f-9639-86691091c485-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283793 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-images\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283821 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.283842 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.284716 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.284855 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.284956 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.285354 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.285178 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.285712 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.285793 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286175 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f5e688a-098e-44eb-be54-6a05aa962aeb-serving-cert\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286220 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeb0f310-3564-404e-aeb5-237cc9267fed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286260 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-client\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286285 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-images\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286306 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ca7acd8-cd3c-4372-ad92-56d142972141-proxy-tls\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286326 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286352 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286375 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286396 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-audit-policies\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286419 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286458 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5s8h\" (UniqueName: \"kubernetes.io/projected/0ca7acd8-cd3c-4372-ad92-56d142972141-kube-api-access-p5s8h\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286479 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286501 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286538 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286577 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkvcq\" (UniqueName: \"kubernetes.io/projected/691c61b7-de0d-49cb-b8c2-9148cc2b8167-kube-api-access-rkvcq\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286599 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-serving-cert\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286622 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286643 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmldl\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-kube-api-access-hmldl\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286668 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286690 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286713 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286735 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-encryption-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286786 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-config\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286810 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286832 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286856 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-etcd-client\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286882 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-service-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286905 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login\") pod 
\"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286926 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286965 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/80eef4fa-3155-4682-8073-82f26a7eb519-metrics-tls\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.286993 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pbb7\" (UniqueName: \"kubernetes.io/projected/749db599-e347-4a7b-9ff8-9c33514ee64a-kube-api-access-6pbb7\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287014 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e0be5a5-d836-400e-824a-c7aff9179549-machine-approver-tls\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287142 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-config\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287166 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-client\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287187 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287210 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf07602-ffde-48e4-b15e-ff3a08779244-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") 
" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287230 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287250 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwzw\" (UniqueName: \"kubernetes.io/projected/0bf79614-f448-4a49-bbc7-49da6763842f-kube-api-access-plwzw\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287259 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.287281 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d345e4d-bef1-4c45-89bd-f30b45165dd2-metrics-tls\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.290354 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.290583 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.291404 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.291481 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.291521 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.291586 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.296715 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.298934 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.299260 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.300236 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 
07:29:41.309380 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkjvl\" (UniqueName: \"kubernetes.io/projected/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-kube-api-access-jkjvl\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.309834 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310416 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2452bbf0-39ec-4e69-bf8d-62062d801e43-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310445 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-audit-dir\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310470 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310487 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-config\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310503 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqs2b\" (UniqueName: \"kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310520 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310549 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-image-import-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310565 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310584 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpllk\" (UniqueName: \"kubernetes.io/projected/44e16d27-e50e-4140-a860-b876365c09ca-kube-api-access-lpllk\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310598 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2452bbf0-39ec-4e69-bf8d-62062d801e43-config\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310613 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mttc\" (UniqueName: \"kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310628 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-service-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310643 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-serving-cert\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310658 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310682 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk27f\" (UniqueName: 
\"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-kube-api-access-vk27f\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310696 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgx6d\" (UniqueName: \"kubernetes.io/projected/2b94e895-312e-493f-9720-82e1bffabf02-kube-api-access-dgx6d\") pod \"downloads-7954f5f757-k6tc4\" (UID: \"2b94e895-312e-493f-9720-82e1bffabf02\") " pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310714 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310727 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310746 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310765 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310779 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310795 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310810 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310827 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzd88\" (UniqueName: \"kubernetes.io/projected/3a191a04-11c7-45aa-a054-9d9bf21469e9-kube-api-access-rzd88\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310841 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.310855 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-serving-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.314164 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.316904 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.317523 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.317735 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.318441 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.329606 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.329853 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.330162 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.330926 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.331745 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.332472 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.332637 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.337499 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.337767 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.337881 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.338764 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.350534 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.357470 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.357886 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bl9ht"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.358360 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.359327 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.359874 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.361394 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.362060 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.362528 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7pdzw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.363379 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.367398 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.367634 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.368801 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.369289 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.370342 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.372047 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.372672 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.372980 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.379540 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.379721 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.379860 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.380120 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.380180 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ft54k"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.380571 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.380821 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.381475 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.381594 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.381932 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-pdlw9"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.384137 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.385172 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.386931 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.387271 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.392735 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.394174 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.400146 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9rnzg"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.404033 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k6tc4"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.404088 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.405265 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-477tn"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.411282 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.412908 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-69zqk"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413124 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mttc\" (UniqueName: \"kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413160 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413180 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-vk27f\" (UniqueName: \"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-kube-api-access-vk27f\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413205 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db79bc10-b5e3-4852-973f-b00e50d6314e-service-ca-bundle\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413226 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-serving-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413252 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzd88\" (UniqueName: \"kubernetes.io/projected/3a191a04-11c7-45aa-a054-9d9bf21469e9-kube-api-access-rzd88\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413310 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-serving-cert\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.413458 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb685d41-c42f-4f5f-9639-86691091c485-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414087 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-serving-cert\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414150 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-node-pullsecrets\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414181 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2452bbf0-39ec-4e69-bf8d-62062d801e43-kube-api-access\") pod 
\"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414204 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414228 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktpxr\" (UniqueName: \"kubernetes.io/projected/4d345e4d-bef1-4c45-89bd-f30b45165dd2-kube-api-access-ktpxr\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414251 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s945b\" (UniqueName: \"kubernetes.io/projected/2f5e688a-098e-44eb-be54-6a05aa962aeb-kube-api-access-s945b\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414273 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72335a3a-bb68-48fc-aee8-833a73ea5991-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414296 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-audit\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414323 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plppn\" (UniqueName: \"kubernetes.io/projected/2e0be5a5-d836-400e-824a-c7aff9179549-kube-api-access-plppn\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414348 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcf07602-ffde-48e4-b15e-ff3a08779244-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414373 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles\") pod 
\"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414427 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691c61b7-de0d-49cb-b8c2-9148cc2b8167-serving-cert\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414454 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414482 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm9hg\" (UniqueName: \"kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414507 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/44e16d27-e50e-4140-a860-b876365c09ca-audit-dir\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414528 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414551 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc26f\" (UniqueName: \"kubernetes.io/projected/eeb0f310-3564-404e-aeb5-237cc9267fed-kube-api-access-tc26f\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414559 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-c2f7j"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414579 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/80eef4fa-3155-4682-8073-82f26a7eb519-trusted-ca\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414618 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb685d41-c42f-4f5f-9639-86691091c485-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414639 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f5e688a-098e-44eb-be54-6a05aa962aeb-serving-cert\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414661 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeb0f310-3564-404e-aeb5-237cc9267fed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414683 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7a8152f-0346-4350-a56e-af6018afe93b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414705 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-images\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414726 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414749 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414771 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-client\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414795 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-images\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414816 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414840 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414867 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkvcq\" (UniqueName: \"kubernetes.io/projected/691c61b7-de0d-49cb-b8c2-9148cc2b8167-kube-api-access-rkvcq\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414891 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-serving-cert\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414912 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414936 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.414987 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk7xt\" (UniqueName: \"kubernetes.io/projected/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-kube-api-access-rk7xt\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415057 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 
07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415082 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415133 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-default-certificate\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415156 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415210 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e0be5a5-d836-400e-824a-c7aff9179549-machine-approver-tls\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415237 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf07602-ffde-48e4-b15e-ff3a08779244-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415283 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2452bbf0-39ec-4e69-bf8d-62062d801e43-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415307 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d345e4d-bef1-4c45-89bd-f30b45165dd2-metrics-tls\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415342 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415330 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-audit-dir\") pod \"apiserver-76f77b778f-9rnzg\" (UID: 
\"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415380 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-config\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415404 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqs2b\" (UniqueName: \"kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415440 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415452 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72335a3a-bb68-48fc-aee8-833a73ea5991-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415479 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpllk\" (UniqueName: \"kubernetes.io/projected/44e16d27-e50e-4140-a860-b876365c09ca-kube-api-access-lpllk\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415525 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415574 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgx6d\" (UniqueName: \"kubernetes.io/projected/2b94e895-312e-493f-9720-82e1bffabf02-kube-api-access-dgx6d\") pod \"downloads-7954f5f757-k6tc4\" (UID: \"2b94e895-312e-493f-9720-82e1bffabf02\") " pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415626 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-service-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415651 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-serving-cert\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415700 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415723 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415770 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5frr\" (UniqueName: \"kubernetes.io/projected/db79bc10-b5e3-4852-973f-b00e50d6314e-kube-api-access-h5frr\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415796 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-metrics-certs\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415819 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415871 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415894 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415962 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415992 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416013 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416062 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db28a622-4b74-49e9-bd91-6f2a253583fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416095 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416143 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416167 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tlqp\" (UniqueName: \"kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416214 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416459 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeb0f310-3564-404e-aeb5-237cc9267fed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416489 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95bx9\" (UniqueName: \"kubernetes.io/projected/cb685d41-c42f-4f5f-9639-86691091c485-kube-api-access-95bx9\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416557 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-config\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416662 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-auth-proxy-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416713 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7a8152f-0346-4350-a56e-af6018afe93b-config\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416738 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-trusted-ca\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416788 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72335a3a-bb68-48fc-aee8-833a73ea5991-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416814 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-encryption-config\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416867 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416884 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-node-pullsecrets\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416900 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416959 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ca7acd8-cd3c-4372-ad92-56d142972141-proxy-tls\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.416986 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417036 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417067 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417114 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-audit-policies\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417150 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5s8h\" (UniqueName: \"kubernetes.io/projected/0ca7acd8-cd3c-4372-ad92-56d142972141-kube-api-access-p5s8h\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417210 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417240 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-679jb\" (UniqueName: \"kubernetes.io/projected/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-kube-api-access-679jb\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417305 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7a8152f-0346-4350-a56e-af6018afe93b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417186 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-serving-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417358 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417528 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417607 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmldl\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-kube-api-access-hmldl\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417692 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417770 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-config\") pod 
\"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417837 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-encryption-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.417912 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418008 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418089 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418164 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-service-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418237 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-etcd-client\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418308 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/80eef4fa-3155-4682-8073-82f26a7eb519-metrics-tls\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418446 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pbb7\" (UniqueName: \"kubernetes.io/projected/749db599-e347-4a7b-9ff8-9c33514ee64a-kube-api-access-6pbb7\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418505 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418576 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418646 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xssf\" (UniqueName: \"kubernetes.io/projected/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-kube-api-access-6xssf\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418719 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr7rs\" (UniqueName: \"kubernetes.io/projected/db28a622-4b74-49e9-bd91-6f2a253583fe-kube-api-access-wr7rs\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418793 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-config\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418864 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-client\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418936 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419032 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkjvl\" (UniqueName: \"kubernetes.io/projected/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-kube-api-access-jkjvl\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419108 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-stats-auth\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419179 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwzw\" (UniqueName: \"kubernetes.io/projected/0bf79614-f448-4a49-bbc7-49da6763842f-kube-api-access-plwzw\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419250 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419317 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-image-import-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419387 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419465 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2452bbf0-39ec-4e69-bf8d-62062d801e43-config\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.419786 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf07602-ffde-48e4-b15e-ff3a08779244-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.420263 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2452bbf0-39ec-4e69-bf8d-62062d801e43-config\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.420863 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e0be5a5-d836-400e-824a-c7aff9179549-machine-approver-tls\") pod \"machine-approver-56656f9798-sjms6\" (UID: 
\"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.422469 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.423106 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-audit\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.423352 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.424555 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-config\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.425281 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcf07602-ffde-48e4-b15e-ff3a08779244-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.425773 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb685d41-c42f-4f5f-9639-86691091c485-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.426007 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.426135 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.426180 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.426292 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2452bbf0-39ec-4e69-bf8d-62062d801e43-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.426837 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-encryption-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.427179 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.427771 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.428218 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-serving-cert\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.428521 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.428540 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-serving-cert\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.428686 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeb0f310-3564-404e-aeb5-237cc9267fed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.429024 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.429240 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.429775 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-config\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.430093 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.430239 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e0be5a5-d836-400e-824a-c7aff9179549-auth-proxy-config\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.430648 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-audit-policies\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.430976 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/691c61b7-de0d-49cb-b8c2-9148cc2b8167-trusted-ca\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.431015 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.433585 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691c61b7-de0d-49cb-b8c2-9148cc2b8167-serving-cert\") pod 
\"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.433639 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0bf79614-f448-4a49-bbc7-49da6763842f-audit-dir\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.433668 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-config\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.415411 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.434726 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.434738 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-config\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.435106 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.435216 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-config\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.435699 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.435723 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc 
kubenswrapper[4612]: I1203 07:29:41.418593 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.437014 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-encryption-config\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.436384 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.438134 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.436132 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.418880 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.438674 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.438761 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-2z2pw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.443099 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.439578 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4d345e4d-bef1-4c45-89bd-f30b45165dd2-metrics-tls\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.440862 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-service-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" 
Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.441020 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.441455 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/44e16d27-e50e-4140-a860-b876365c09ca-audit-dir\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445078 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2z4s"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445209 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tznzj"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445440 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445534 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445617 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445702 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445783 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445862 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.445958 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.446290 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.446707 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/44e16d27-e50e-4140-a860-b876365c09ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.447143 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.447536 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f5e688a-098e-44eb-be54-6a05aa962aeb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.447555 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-serving-cert\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.448835 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-client\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.449014 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.449325 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.449665 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.449823 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cb685d41-c42f-4f5f-9639-86691091c485-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.450339 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeb0f310-3564-404e-aeb5-237cc9267fed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.451312 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/749db599-e347-4a7b-9ff8-9c33514ee64a-images\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.439496 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.452130 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.452412 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.452602 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xrwls"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.453547 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0bf79614-f448-4a49-bbc7-49da6763842f-image-import-ca\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.456611 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/44e16d27-e50e-4140-a860-b876365c09ca-etcd-client\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.456717 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.457508 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0bf79614-f448-4a49-bbc7-49da6763842f-etcd-client\") pod \"apiserver-76f77b778f-9rnzg\" (UID: 
\"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.457727 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.458313 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.459070 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.459738 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.460443 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f5e688a-098e-44eb-be54-6a05aa962aeb-serving-cert\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.461092 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.461719 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3a191a04-11c7-45aa-a054-9d9bf21469e9-etcd-service-ca\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.462121 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c2f7j"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.463731 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4sxt"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.465368 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ft54k"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.466935 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9vwq6"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.468417 4612 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.469779 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.470894 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.471862 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.472141 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48rpf"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.474567 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2z2pw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.474697 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.474968 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.476034 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7pdzw"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.477051 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48rpf"] Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.492408 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.496336 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ca7acd8-cd3c-4372-ad92-56d142972141-images\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.513009 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520074 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-metrics-certs\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520116 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db28a622-4b74-49e9-bd91-6f2a253583fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 
07:29:41.520159 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7a8152f-0346-4350-a56e-af6018afe93b-config\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520184 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72335a3a-bb68-48fc-aee8-833a73ea5991-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520213 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520239 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520277 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-679jb\" (UniqueName: \"kubernetes.io/projected/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-kube-api-access-679jb\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520302 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmjwn\" (UniqueName: \"kubernetes.io/projected/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-kube-api-access-pmjwn\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520333 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7a8152f-0346-4350-a56e-af6018afe93b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520386 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520413 
4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xssf\" (UniqueName: \"kubernetes.io/projected/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-kube-api-access-6xssf\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520435 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr7rs\" (UniqueName: \"kubernetes.io/projected/db28a622-4b74-49e9-bd91-6f2a253583fe-kube-api-access-wr7rs\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520497 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-stats-auth\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520555 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520575 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db79bc10-b5e3-4852-973f-b00e50d6314e-service-ca-bundle\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520599 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-serving-cert\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520557 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520627 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72335a3a-bb68-48fc-aee8-833a73ea5991-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520758 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7a8152f-0346-4350-a56e-af6018afe93b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520783 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-profile-collector-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520821 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk7xt\" (UniqueName: \"kubernetes.io/projected/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-kube-api-access-rk7xt\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520864 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-default-certificate\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520885 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520910 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t8jb\" (UniqueName: \"kubernetes.io/projected/44eb6f69-caac-40de-ae3c-23755e409aba-kube-api-access-4t8jb\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520932 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72335a3a-bb68-48fc-aee8-833a73ea5991-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.520989 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.521005 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-srv-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.521029 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5frr\" (UniqueName: \"kubernetes.io/projected/db79bc10-b5e3-4852-973f-b00e50d6314e-kube-api-access-h5frr\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.531699 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.542666 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ca7acd8-cd3c-4372-ad92-56d142972141-proxy-tls\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.551592 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.572813 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.591873 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.607074 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/80eef4fa-3155-4682-8073-82f26a7eb519-metrics-tls\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.618882 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.621614 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.621831 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-profile-collector-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.622070 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t8jb\" (UniqueName: 
\"kubernetes.io/projected/44eb6f69-caac-40de-ae3c-23755e409aba-kube-api-access-4t8jb\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.622135 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.622157 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-srv-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.622276 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmjwn\" (UniqueName: \"kubernetes.io/projected/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-kube-api-access-pmjwn\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.629762 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/80eef4fa-3155-4682-8073-82f26a7eb519-trusted-ca\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.632131 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.672997 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.692632 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.712887 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.724196 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-serving-cert\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.731698 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.752311 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.761797 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db79bc10-b5e3-4852-973f-b00e50d6314e-service-ca-bundle\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.771600 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.781554 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7a8152f-0346-4350-a56e-af6018afe93b-config\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.792354 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.812746 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.824992 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-default-certificate\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.832324 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.844021 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-stats-auth\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.852218 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.865279 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db79bc10-b5e3-4852-973f-b00e50d6314e-metrics-certs\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.872768 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.893378 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.913028 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.925388 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7a8152f-0346-4350-a56e-af6018afe93b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.933325 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.954337 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.972898 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 07:29:41 crc kubenswrapper[4612]: I1203 07:29:41.993915 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.012483 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.025972 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.032773 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.042939 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.063100 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.073049 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.093337 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.112639 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.126708 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72335a3a-bb68-48fc-aee8-833a73ea5991-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.132166 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.141569 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72335a3a-bb68-48fc-aee8-833a73ea5991-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.151825 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.172066 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.192601 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.212231 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.233229 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.252680 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.264092 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db28a622-4b74-49e9-bd91-6f2a253583fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.273024 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.292141 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.311580 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.331872 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.352065 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.370720 4612 request.go:700] Waited for 1.008406443s due to client-side throttling, not priority and fairness, request: 
GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.372667 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.392400 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.412214 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.431466 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: E1203 07:29:42.448793 4612 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition
Dec 03 07:29:42 crc kubenswrapper[4612]: E1203 07:29:42.448859 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls podName:749db599-e347-4a7b-9ff8-9c33514ee64a nodeName:}" failed. No retries permitted until 2025-12-03 07:29:42.948841086 +0000 UTC m=+146.122198496 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-9vwq6" (UID: "749db599-e347-4a7b-9ff8-9c33514ee64a") : failed to sync secret cache: timed out waiting for the condition
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.459276 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.471321 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.486598 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.491851 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.512279 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.526928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.527531 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-profile-collector-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.532218 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.537623 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-srv-cert\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.552077 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.572132 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.592283 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.611603 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: E1203 07:29:42.622292 4612 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 03 07:29:42 crc kubenswrapper[4612]: E1203 07:29:42.622359 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert podName:44eb6f69-caac-40de-ae3c-23755e409aba nodeName:}" failed. No retries permitted until 2025-12-03 07:29:43.122338168 +0000 UTC m=+146.295695578 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert") pod "olm-operator-6b444d44fb-s2lkt" (UID: "44eb6f69-caac-40de-ae3c-23755e409aba") : failed to sync secret cache: timed out waiting for the condition
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.632844 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.652416 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.673170 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.692348 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.712495 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.732385 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.753852 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.772734 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.792312 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.812159 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.832916 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.851929 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.872557 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.909074 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzd88\" (UniqueName: \"kubernetes.io/projected/3a191a04-11c7-45aa-a054-9d9bf21469e9-kube-api-access-rzd88\") pod \"etcd-operator-b45778765-j4sxt\" (UID: \"3a191a04-11c7-45aa-a054-9d9bf21469e9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.949752 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mttc\" (UniqueName: \"kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc\") pod \"controller-manager-879f6c89f-vj8kl\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.968504 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk27f\" (UniqueName: \"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-kube-api-access-vk27f\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.972459 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.977269 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.988572 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt"
Dec 03 07:29:42 crc kubenswrapper[4612]: I1203 07:29:42.991324 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.012890 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.043689 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:43 crc kubenswrapper[4612]: E1203 07:29:43.043836 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:31:45.043804692 +0000 UTC m=+268.217162122 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.043984 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.044106 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.044194 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.044293 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.047740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.048057 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.048524 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 07:29:43 crc
kubenswrapper[4612]: I1203 07:29:43.060010 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2452bbf0-39ec-4e69-bf8d-62062d801e43-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8764s\" (UID: \"2452bbf0-39ec-4e69-bf8d-62062d801e43\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.080647 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-bound-sa-token\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.089639 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmldl\" (UniqueName: \"kubernetes.io/projected/80eef4fa-3155-4682-8073-82f26a7eb519-kube-api-access-hmldl\") pod \"ingress-operator-5b745b69d9-bl5ck\" (UID: \"80eef4fa-3155-4682-8073-82f26a7eb519\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.107207 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktpxr\" (UniqueName: \"kubernetes.io/projected/4d345e4d-bef1-4c45-89bd-f30b45165dd2-kube-api-access-ktpxr\") pod \"dns-operator-744455d44c-pdlw9\" (UID: \"4d345e4d-bef1-4c45-89bd-f30b45165dd2\") " pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.127477 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s945b\" (UniqueName: \"kubernetes.io/projected/2f5e688a-098e-44eb-be54-6a05aa962aeb-kube-api-access-s945b\") pod \"authentication-operator-69f744f599-bl9ht\" (UID: \"2f5e688a-098e-44eb-be54-6a05aa962aeb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.147431 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.147580 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.150461 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44eb6f69-caac-40de-ae3c-23755e409aba-srv-cert\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.150496 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.155765 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plppn\" (UniqueName: \"kubernetes.io/projected/2e0be5a5-d836-400e-824a-c7aff9179549-kube-api-access-plppn\") pod \"machine-approver-56656f9798-sjms6\" (UID: \"2e0be5a5-d836-400e-824a-c7aff9179549\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.167598 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tlqp\" (UniqueName: \"kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp\") pod \"oauth-openshift-558db77b4-tznzj\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.190137 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4sxt"] Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.190471 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95bx9\" (UniqueName: \"kubernetes.io/projected/cb685d41-c42f-4f5f-9639-86691091c485-kube-api-access-95bx9\") pod \"openshift-apiserver-operator-796bbdcf4f-gcf8d\" (UID: \"cb685d41-c42f-4f5f-9639-86691091c485\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.206337 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.207144 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5s8h\" (UniqueName: \"kubernetes.io/projected/0ca7acd8-cd3c-4372-ad92-56d142972141-kube-api-access-p5s8h\") pod \"machine-config-operator-74547568cd-5h2t4\" (UID: \"0ca7acd8-cd3c-4372-ad92-56d142972141\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.230138 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"] Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.230694 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqs2b\" (UniqueName: \"kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b\") pod \"route-controller-manager-6576b87f9c-4dnbm\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.251604 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.285408 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.289097 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpllk\" (UniqueName: \"kubernetes.io/projected/44e16d27-e50e-4140-a860-b876365c09ca-kube-api-access-lpllk\") pod \"apiserver-7bbb656c7d-2ldkj\" (UID: \"44e16d27-e50e-4140-a860-b876365c09ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.296461 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.296461 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.303351 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.309444 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.313139 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwzw\" (UniqueName: \"kubernetes.io/projected/0bf79614-f448-4a49-bbc7-49da6763842f-kube-api-access-plwzw\") pod \"apiserver-76f77b778f-9rnzg\" (UID: \"0bf79614-f448-4a49-bbc7-49da6763842f\") " pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.313250 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.323852 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.324053 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.331991 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkjvl\" (UniqueName: \"kubernetes.io/projected/b8ed0ad2-dcc9-459d-a0a8-7d854a591d79-kube-api-access-jkjvl\") pod \"cluster-samples-operator-665b6dd947-d9ftc\" (UID: \"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.332019 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.340184 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.367246 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm9hg\" (UniqueName: \"kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg\") pod \"console-f9d7485db-477tn\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.371806 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.390085 4612 request.go:700] Waited for 1.943964678s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.391439 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.399796 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.406079 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcf07602-ffde-48e4-b15e-ff3a08779244-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-78pcx\" (UID: \"dcf07602-ffde-48e4-b15e-ff3a08779244\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.412345 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.413590 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-pdlw9"] Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.439752 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.462003 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkvcq\" (UniqueName: \"kubernetes.io/projected/691c61b7-de0d-49cb-b8c2-9148cc2b8167-kube-api-access-rkvcq\") pod \"console-operator-58897d9998-b2z4s\" (UID: \"691c61b7-de0d-49cb-b8c2-9148cc2b8167\") " pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.473078 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc26f\" (UniqueName: \"kubernetes.io/projected/eeb0f310-3564-404e-aeb5-237cc9267fed-kube-api-access-tc26f\") pod \"openshift-controller-manager-operator-756b6f6bc6-l2vxn\" (UID: \"eeb0f310-3564-404e-aeb5-237cc9267fed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.480826 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.484211 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.496540 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgx6d\" (UniqueName: \"kubernetes.io/projected/2b94e895-312e-493f-9720-82e1bffabf02-kube-api-access-dgx6d\") pod \"downloads-7954f5f757-k6tc4\" (UID: \"2b94e895-312e-493f-9720-82e1bffabf02\") " pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.496921 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.499757 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.500099 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.518801 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.528246 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.533545 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.552662 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.572185 4612 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.572938 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.591708 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.642185 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-679jb\" (UniqueName: \"kubernetes.io/projected/edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7-kube-api-access-679jb\") pod \"multus-admission-controller-857f4d67dd-7pdzw\" (UID: \"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.675675 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xssf\" (UniqueName: \"kubernetes.io/projected/04bcb431-3e3f-46ab-be09-7f8299ba2ea3-kube-api-access-6xssf\") pod \"openshift-config-operator-7777fb866f-xrwls\" (UID: \"04bcb431-3e3f-46ab-be09-7f8299ba2ea3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.698476 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7a8152f-0346-4350-a56e-af6018afe93b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bpbwj\" (UID: \"e7a8152f-0346-4350-a56e-af6018afe93b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.703344 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.725376 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk7xt\" (UniqueName: \"kubernetes.io/projected/166fff6a-f5bb-4675-86cb-1be1c8b5ed7a-kube-api-access-rk7xt\") pod \"kube-storage-version-migrator-operator-b67b599dd-5mk52\" (UID: \"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.731395 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72335a3a-bb68-48fc-aee8-833a73ea5991-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bxgnq\" (UID: \"72335a3a-bb68-48fc-aee8-833a73ea5991\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.764489 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5frr\" (UniqueName: \"kubernetes.io/projected/db79bc10-b5e3-4852-973f-b00e50d6314e-kube-api-access-h5frr\") pod \"router-default-5444994796-mng7v\" (UID: \"db79bc10-b5e3-4852-973f-b00e50d6314e\") " pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.765107 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.788870 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t8jb\" (UniqueName: \"kubernetes.io/projected/44eb6f69-caac-40de-ae3c-23755e409aba-kube-api-access-4t8jb\") pod \"olm-operator-6b444d44fb-s2lkt\" (UID: \"44eb6f69-caac-40de-ae3c-23755e409aba\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.788919 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmjwn\" (UniqueName: \"kubernetes.io/projected/5f6b8cd6-7786-458e-93a7-7c4dd363f1c2-kube-api-access-pmjwn\") pod \"catalog-operator-68c6474976-mtcm2\" (UID: \"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.806415 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" event={"ID":"3cb00e09-7604-4998-9c5e-00f758d2de98","Type":"ContainerStarted","Data":"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.806464 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" event={"ID":"3cb00e09-7604-4998-9c5e-00f758d2de98","Type":"ContainerStarted","Data":"72565cbc57c43791105bcb71d4d1cd75499399b05e53c7b1656239d61f39e721"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.808957 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.811564 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.813815 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" event={"ID":"2e0be5a5-d836-400e-824a-c7aff9179549","Type":"ContainerStarted","Data":"4c5954982b2f5239c1225558698ee22bcaa6a7dfd1ac87c59c34e45c84aca100"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.823203 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.826260 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" event={"ID":"4d345e4d-bef1-4c45-89bd-f30b45165dd2","Type":"ContainerStarted","Data":"6da26a5f203742809066aa8ae07376ec95c5ef2259efb979b936cd95180c077e"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.836621 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.837151 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" event={"ID":"3a191a04-11c7-45aa-a054-9d9bf21469e9","Type":"ContainerStarted","Data":"3acea9fd397faf03277c9fb9ad5fa502883b4b31ca51140995ddd5d588fb1950"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.837192 4612 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" event={"ID":"3a191a04-11c7-45aa-a054-9d9bf21469e9","Type":"ContainerStarted","Data":"19b030ae0e305efd195fa4589cdb349f33a3c9414d75bd67b5c22e03e87e6742"} Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.852555 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/749db599-e347-4a7b-9ff8-9c33514ee64a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.862211 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.881803 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pbb7\" (UniqueName: \"kubernetes.io/projected/749db599-e347-4a7b-9ff8-9c33514ee64a-kube-api-access-6pbb7\") pod \"machine-api-operator-5694c8668f-9vwq6\" (UID: \"749db599-e347-4a7b-9ff8-9c33514ee64a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.886782 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr7rs\" (UniqueName: \"kubernetes.io/projected/db28a622-4b74-49e9-bd91-6f2a253583fe-kube-api-access-wr7rs\") pod \"control-plane-machine-set-operator-78cbb6b69f-dzht7\" (UID: \"db28a622-4b74-49e9-bd91-6f2a253583fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.911388 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.916301 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.923443 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.944515 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.944932 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.962432 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.972932 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-webhook-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.972981 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.972998 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69d3e05f-20c9-47b9-a40d-29df9a8e1105-config\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973025 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973048 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973089 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tsgz\" (UniqueName: \"kubernetes.io/projected/3827c500-45e5-47ad-823b-8b709dc59b23-kube-api-access-2tsgz\") pod \"migrator-59844c95c7-6h5hf\" (UID: \"3827c500-45e5-47ad-823b-8b709dc59b23\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973115 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-key\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973138 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/44f8faad-fba1-499c-8115-e943359cf8b5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973201 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973226 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69d3e05f-20c9-47b9-a40d-29df9a8e1105-serving-cert\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973242 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973256 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbtgp\" (UniqueName: \"kubernetes.io/projected/5648b2e6-22d6-4a63-b4bd-c28961ef8511-kube-api-access-cbtgp\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973293 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5648b2e6-22d6-4a63-b4bd-c28961ef8511-tmpfs\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973308 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/943b34dd-b66a-45e0-919a-57b22def2aa6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973323 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-apiservice-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: 
\"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973367 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973408 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sslch\" (UniqueName: \"kubernetes.io/projected/943b34dd-b66a-45e0-919a-57b22def2aa6-kube-api-access-sslch\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973439 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973524 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-cabundle\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973558 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973575 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptt66\" (UniqueName: \"kubernetes.io/projected/4437c69a-44f5-4e28-a3a1-71efc015d6a7-kube-api-access-ptt66\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973590 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tjd\" (UniqueName: \"kubernetes.io/projected/69d3e05f-20c9-47b9-a40d-29df9a8e1105-kube-api-access-h8tjd\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973604 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/943b34dd-b66a-45e0-919a-57b22def2aa6-proxy-tls\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: 
\"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973628 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvzj9\" (UniqueName: \"kubernetes.io/projected/44f8faad-fba1-499c-8115-e943359cf8b5-kube-api-access-nvzj9\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973674 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q745\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973706 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czqsq\" (UniqueName: \"kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973722 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973739 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.973762 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzlgf\" (UniqueName: \"kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:43 crc kubenswrapper[4612]: E1203 07:29:43.977097 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:44.477084677 +0000 UTC m=+147.650442077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:43 crc kubenswrapper[4612]: I1203 07:29:43.991712 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.052370 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.077625 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.077874 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.077901 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69d3e05f-20c9-47b9-a40d-29df9a8e1105-config\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.077919 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.078011 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tsgz\" (UniqueName: \"kubernetes.io/projected/3827c500-45e5-47ad-823b-8b709dc59b23-kube-api-access-2tsgz\") pod \"migrator-59844c95c7-6h5hf\" (UID: \"3827c500-45e5-47ad-823b-8b709dc59b23\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.078040 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqbbh\" (UniqueName: \"kubernetes.io/projected/84f9b984-d4ee-44e0-8d47-c7abf063eafd-kube-api-access-sqbbh\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.078060 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" 
(UniqueName: \"kubernetes.io/secret/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-metrics-tls\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.078089 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-key\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.086625 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69d3e05f-20c9-47b9-a40d-29df9a8e1105-config\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.092981 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-mountpoint-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093069 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/44f8faad-fba1-499c-8115-e943359cf8b5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093108 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093136 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69d3e05f-20c9-47b9-a40d-29df9a8e1105-serving-cert\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093175 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093217 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-csi-data-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " 
pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093402 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbtgp\" (UniqueName: \"kubernetes.io/projected/5648b2e6-22d6-4a63-b4bd-c28961ef8511-kube-api-access-cbtgp\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093423 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/943b34dd-b66a-45e0-919a-57b22def2aa6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093448 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093470 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5648b2e6-22d6-4a63-b4bd-c28961ef8511-tmpfs\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093491 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-apiservice-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093532 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093611 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sslch\" (UniqueName: \"kubernetes.io/projected/943b34dd-b66a-45e0-919a-57b22def2aa6-kube-api-access-sslch\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093648 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-plugins-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093699 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093724 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-node-bootstrap-token\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093921 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-cert\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093961 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-cabundle\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.093985 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/943b34dd-b66a-45e0-919a-57b22def2aa6-proxy-tls\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094009 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094031 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptt66\" (UniqueName: \"kubernetes.io/projected/4437c69a-44f5-4e28-a3a1-71efc015d6a7-kube-api-access-ptt66\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094185 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tjd\" (UniqueName: \"kubernetes.io/projected/69d3e05f-20c9-47b9-a40d-29df9a8e1105-kube-api-access-h8tjd\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094206 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-registration-dir\") pod \"csi-hostpathplugin-48rpf\" 
(UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094231 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvzj9\" (UniqueName: \"kubernetes.io/projected/44f8faad-fba1-499c-8115-e943359cf8b5-kube-api-access-nvzj9\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094271 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqx9p\" (UniqueName: \"kubernetes.io/projected/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-kube-api-access-mqx9p\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094295 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q745\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094318 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czqsq\" (UniqueName: \"kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094341 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094486 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-socket-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094510 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094526 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzlgf\" (UniqueName: \"kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc 
kubenswrapper[4612]: I1203 07:29:44.094573 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzgpm\" (UniqueName: \"kubernetes.io/projected/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-kube-api-access-hzgpm\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094609 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sljmb\" (UniqueName: \"kubernetes.io/projected/14511212-9011-47d9-9ecc-2554128b946d-kube-api-access-sljmb\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094628 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-certs\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094820 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-webhook-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094865 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-config-volume\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.095175 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:44.59514821 +0000 UTC m=+147.768505610 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.094034 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.110105 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.111624 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5648b2e6-22d6-4a63-b4bd-c28961ef8511-tmpfs\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.113957 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-cabundle\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.116584 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4437c69a-44f5-4e28-a3a1-71efc015d6a7-signing-key\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.117420 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.127055 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.128229 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-apiservice-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: 
\"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.129273 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/943b34dd-b66a-45e0-919a-57b22def2aa6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.130210 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/943b34dd-b66a-45e0-919a-57b22def2aa6-proxy-tls\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.137731 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tsgz\" (UniqueName: \"kubernetes.io/projected/3827c500-45e5-47ad-823b-8b709dc59b23-kube-api-access-2tsgz\") pod \"migrator-59844c95c7-6h5hf\" (UID: \"3827c500-45e5-47ad-823b-8b709dc59b23\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.138296 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.138703 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5648b2e6-22d6-4a63-b4bd-c28961ef8511-webhook-cert\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.140618 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.148631 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.149868 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.154386 4612 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.159750 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sslch\" (UniqueName: \"kubernetes.io/projected/943b34dd-b66a-45e0-919a-57b22def2aa6-kube-api-access-sslch\") pod \"machine-config-controller-84d6567774-4sqvm\" (UID: \"943b34dd-b66a-45e0-919a-57b22def2aa6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.175052 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s"] Dec 03 07:29:44 crc kubenswrapper[4612]: W1203 07:29:44.180125 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb79bc10_b5e3_4852_973f_b00e50d6314e.slice/crio-bfcf66f0abec160f8a0ea8c38ae272ab8c3d02ccdf961e278a7022e6f6840f8d WatchSource:0}: Error finding container bfcf66f0abec160f8a0ea8c38ae272ab8c3d02ccdf961e278a7022e6f6840f8d: Status 404 returned error can't find the container with id bfcf66f0abec160f8a0ea8c38ae272ab8c3d02ccdf961e278a7022e6f6840f8d Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.185168 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.188301 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/44f8faad-fba1-499c-8115-e943359cf8b5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.200913 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzlgf\" (UniqueName: \"kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf\") pod \"marketplace-operator-79b997595-r42pw\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210473 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-registration-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210534 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqx9p\" (UniqueName: \"kubernetes.io/projected/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-kube-api-access-mqx9p\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210571 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-socket-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210621 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzgpm\" (UniqueName: \"kubernetes.io/projected/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-kube-api-access-hzgpm\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210716 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sljmb\" (UniqueName: \"kubernetes.io/projected/14511212-9011-47d9-9ecc-2554128b946d-kube-api-access-sljmb\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210782 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-certs\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210819 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-config-volume\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.210956 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.211925 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqbbh\" (UniqueName: \"kubernetes.io/projected/84f9b984-d4ee-44e0-8d47-c7abf063eafd-kube-api-access-sqbbh\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.211979 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-metrics-tls\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.212006 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-mountpoint-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.212070 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-csi-data-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.212158 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-plugins-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.212213 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-node-bootstrap-token\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.212244 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-cert\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.220338 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-config-volume\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.220593 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-registration-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.221153 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-mountpoint-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.223798 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:44.723780206 +0000 UTC m=+147.897137606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.225659 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4"] Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.230525 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.231175 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-metrics-tls\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.231254 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-socket-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.231425 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-plugins-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.231482 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/84f9b984-d4ee-44e0-8d47-c7abf063eafd-csi-data-dir\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.234258 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bl9ht"] Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.240798 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-certs\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.244820 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q745\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 
07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.245185 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69d3e05f-20c9-47b9-a40d-29df9a8e1105-serving-cert\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.245621 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.276314 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-cert\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.276897 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbtgp\" (UniqueName: \"kubernetes.io/projected/5648b2e6-22d6-4a63-b4bd-c28961ef8511-kube-api-access-cbtgp\") pod \"packageserver-d55dfcdfc-mc86r\" (UID: \"5648b2e6-22d6-4a63-b4bd-c28961ef8511\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.277602 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czqsq\" (UniqueName: \"kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq\") pod \"collect-profiles-29412435-6x4wh\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.290667 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14511212-9011-47d9-9ecc-2554128b946d-node-bootstrap-token\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.291273 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptt66\" (UniqueName: \"kubernetes.io/projected/4437c69a-44f5-4e28-a3a1-71efc015d6a7-kube-api-access-ptt66\") pod \"service-ca-9c57cc56f-ft54k\" (UID: \"4437c69a-44f5-4e28-a3a1-71efc015d6a7\") " pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.293426 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.295287 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.302518 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tjd\" (UniqueName: \"kubernetes.io/projected/69d3e05f-20c9-47b9-a40d-29df9a8e1105-kube-api-access-h8tjd\") pod \"service-ca-operator-777779d784-d2tjw\" (UID: \"69d3e05f-20c9-47b9-a40d-29df9a8e1105\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.313480 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.314100 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.314533 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:44.814518055 +0000 UTC m=+147.987875455 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.322288 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.329490 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.342024 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvzj9\" (UniqueName: \"kubernetes.io/projected/44f8faad-fba1-499c-8115-e943359cf8b5-kube-api-access-nvzj9\") pod \"package-server-manager-789f6589d5-mnh7m\" (UID: \"44f8faad-fba1-499c-8115-e943359cf8b5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.342869 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.343225 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqbbh\" (UniqueName: \"kubernetes.io/projected/84f9b984-d4ee-44e0-8d47-c7abf063eafd-kube-api-access-sqbbh\") pod \"csi-hostpathplugin-48rpf\" (UID: \"84f9b984-d4ee-44e0-8d47-c7abf063eafd\") " pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.346187 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj"] Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.350139 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9rnzg"] Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.351424 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck"] Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.379378 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqx9p\" (UniqueName: \"kubernetes.io/projected/f42d1c6a-b297-4145-8fe6-e42a1a0114c5-kube-api-access-mqx9p\") pod \"ingress-canary-2z2pw\" (UID: \"f42d1c6a-b297-4145-8fe6-e42a1a0114c5\") " pod="openshift-ingress-canary/ingress-canary-2z2pw" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.396476 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.405737 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sljmb\" (UniqueName: \"kubernetes.io/projected/14511212-9011-47d9-9ecc-2554128b946d-kube-api-access-sljmb\") pod \"machine-config-server-69zqk\" (UID: \"14511212-9011-47d9-9ecc-2554128b946d\") " pod="openshift-machine-config-operator/machine-config-server-69zqk" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.406871 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzgpm\" (UniqueName: \"kubernetes.io/projected/e042ad85-b69e-4239-a7eb-c8da5b0fefe8-kube-api-access-hzgpm\") pod \"dns-default-c2f7j\" (UID: \"e042ad85-b69e-4239-a7eb-c8da5b0fefe8\") " pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.415439 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.415800 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:44.915788755 +0000 UTC m=+148.089146155 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.444899 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx"]
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.454235 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tznzj"]
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.519584 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.519891 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.019876138 +0000 UTC m=+148.193233538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.598193 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m"
Dec 03 07:29:44 crc kubenswrapper[4612]: W1203 07:29:44.601176 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f5e688a_098e_44eb_be54_6a05aa962aeb.slice/crio-e4c930c9df8774761e1ce53e586bd6f62e3ef21b74c20070269904bff9a8e899 WatchSource:0}: Error finding container e4c930c9df8774761e1ce53e586bd6f62e3ef21b74c20070269904bff9a8e899: Status 404 returned error can't find the container with id e4c930c9df8774761e1ce53e586bd6f62e3ef21b74c20070269904bff9a8e899
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.630752 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.631054 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.131042493 +0000 UTC m=+148.304399893 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.654841 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-69zqk"
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.662573 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-c2f7j"
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.669002 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2z2pw"
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.732754 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.733115 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.233096474 +0000 UTC m=+148.406453864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.834722 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.836254 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.336242243 +0000 UTC m=+148.509599643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: W1203 07:29:44.847400 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80eef4fa_3155_4682_8073_82f26a7eb519.slice/crio-eaf68dcf135c7e13bdddaf29d01862e1059127e63f2dcadaeec892e417de1ce8 WatchSource:0}: Error finding container eaf68dcf135c7e13bdddaf29d01862e1059127e63f2dcadaeec892e417de1ce8: Status 404 returned error can't find the container with id eaf68dcf135c7e13bdddaf29d01862e1059127e63f2dcadaeec892e417de1ce8
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.929254 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" event={"ID":"2e0be5a5-d836-400e-824a-c7aff9179549","Type":"ContainerStarted","Data":"8027c4ce5d4eac2ef28fe5b65ac3a9dec652530e66ae1192edc3d3b7550072ce"}
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.939434 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:44 crc kubenswrapper[4612]: E1203 07:29:44.939739 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.43972386 +0000 UTC m=+148.613081260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:44 crc kubenswrapper[4612]: I1203 07:29:44.949144 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" event={"ID":"0ca7acd8-cd3c-4372-ad92-56d142972141","Type":"ContainerStarted","Data":"87496aab547dfa360748d70dde86eee83130087e2d54a353c4352be1f4e546d4"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.032895 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" event={"ID":"2f5e688a-098e-44eb-be54-6a05aa962aeb","Type":"ContainerStarted","Data":"e4c930c9df8774761e1ce53e586bd6f62e3ef21b74c20070269904bff9a8e899"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.038491 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" event={"ID":"0bf79614-f448-4a49-bbc7-49da6763842f","Type":"ContainerStarted","Data":"7c12da753eac5ed417aac01a74fa57a40767faeac9bb696c8eeb592edac78fdf"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.069054 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.073369 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.073861 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.573844997 +0000 UTC m=+148.747202407 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.083385 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" event={"ID":"dcf07602-ffde-48e4-b15e-ff3a08779244","Type":"ContainerStarted","Data":"b94cc6bb0f7020b88424db8de35641ac24d2c4d4dc5a7e94f8746e69c4f698c1"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.175333 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.175717 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.675687232 +0000 UTC m=+148.849044622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.199346 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" event={"ID":"4d345e4d-bef1-4c45-89bd-f30b45165dd2","Type":"ContainerStarted","Data":"fa13ec6cc29d9192ee038cab4342c034097a61fc6cf090c30559e22f643a150f"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.199387 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e5e0c45d5ab70bc0dd2e4fd7a276744bddda0e2059ba6479ac2915c47a70dc4e"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.199412 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mng7v" event={"ID":"db79bc10-b5e3-4852-973f-b00e50d6314e","Type":"ContainerStarted","Data":"bfcf66f0abec160f8a0ea8c38ae272ab8c3d02ccdf961e278a7022e6f6840f8d"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.208642 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" event={"ID":"2452bbf0-39ec-4e69-bf8d-62062d801e43","Type":"ContainerStarted","Data":"75510d60a39491d487395cc0c1d6ae5f7bdad378ad064de9f0628bf73ab8d1f6"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.240687 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a3b778255cff5e82a6cfbd4b3b0d10d773cecc168abb08b4f84799365c2e7b9f"}
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.277829 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.278195 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.778183614 +0000 UTC m=+148.951541014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.378548 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.380921 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.880900442 +0000 UTC m=+149.054257862 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.403001 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.406207 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2z4s"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.454438 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7pdzw"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.486553 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.487097 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:45.987086169 +0000 UTC m=+149.160443569 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.592872 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.593263 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.093248125 +0000 UTC m=+149.266605525 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.644535 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-j4sxt" podStartSLOduration=130.644515617 podStartE2EDuration="2m10.644515617s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:45.602818762 +0000 UTC m=+148.776176172" watchObservedRunningTime="2025-12-03 07:29:45.644515617 +0000 UTC m=+148.817873017"
Dec 03 07:29:45 crc kubenswrapper[4612]: W1203 07:29:45.658058 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb685d41_c42f_4f5f_9639_86691091c485.slice/crio-2420a3f178a4efe19b366a21f9863cdf409e627c9614f2d04076586df90d9f51 WatchSource:0}: Error finding container 2420a3f178a4efe19b366a21f9863cdf409e627c9614f2d04076586df90d9f51: Status 404 returned error can't find the container with id 2420a3f178a4efe19b366a21f9863cdf409e627c9614f2d04076586df90d9f51
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.689571 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.735219 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.771105 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.271085089 +0000 UTC m=+149.444442489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.813997 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" podStartSLOduration=130.813975585 podStartE2EDuration="2m10.813975585s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:45.772445374 +0000 UTC m=+148.945802774" watchObservedRunningTime="2025-12-03 07:29:45.813975585 +0000 UTC m=+148.987332985"
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.825048 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc"]
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.836208 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.836582 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.336562907 +0000 UTC m=+149.509920307 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.844226 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k6tc4"]
Dec 03 07:29:45 crc kubenswrapper[4612]: W1203 07:29:45.971040 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedb4cd28_ef00_4ba5_b8c4_a9d4f91a2ac7.slice/crio-ddfd86612de46a52a6dc112a72674dd5c1d963a6f426fb4ae755eb69e588b97c WatchSource:0}: Error finding container ddfd86612de46a52a6dc112a72674dd5c1d963a6f426fb4ae755eb69e588b97c: Status 404 returned error can't find the container with id ddfd86612de46a52a6dc112a72674dd5c1d963a6f426fb4ae755eb69e588b97c
Dec 03 07:29:45 crc kubenswrapper[4612]: I1203 07:29:45.974802 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:45 crc kubenswrapper[4612]: E1203 07:29:45.975193 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.47517908 +0000 UTC m=+149.648536480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.088532 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.088866 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.58885065 +0000 UTC m=+149.762208050 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.195670 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.196056 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.696041983 +0000 UTC m=+149.869399383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.249857 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.279087 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" event={"ID":"0ca7acd8-cd3c-4372-ad92-56d142972141","Type":"ContainerStarted","Data":"44d5cc245fa44e9db39d4f0cc9d3d4aed07414e9eb044e0badf84140dbd988a2"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.297389 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.297720 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.797704313 +0000 UTC m=+149.971061713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.307861 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" event={"ID":"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7","Type":"ContainerStarted","Data":"ddfd86612de46a52a6dc112a72674dd5c1d963a6f426fb4ae755eb69e588b97c"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.326332 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mng7v" event={"ID":"db79bc10-b5e3-4852-973f-b00e50d6314e","Type":"ContainerStarted","Data":"e8d664b9d0fbae3dabbd32b972d637cb27d0eaa9350d6af43a62bf35735e720d"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.341560 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k6tc4" event={"ID":"2b94e895-312e-493f-9720-82e1bffabf02","Type":"ContainerStarted","Data":"79d0363d6c67f13319aa02219dd0e7d5c6c334e82a0c12f59ae6dd61c56fccd1"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.342364 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d223a972c49a8eb902aff0958704c8a0485124f887ff6362d67b31621910bc54"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.347097 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" event={"ID":"15002e9b-936b-438d-b53b-dc8764c9dea3","Type":"ContainerStarted","Data":"668d5b4ffedae750518bf98ea6559fb4626babd53a767656d1640c40d7ff93a3"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.365171 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" event={"ID":"2e0be5a5-d836-400e-824a-c7aff9179549","Type":"ContainerStarted","Data":"ba8318bf7a400e6c385a0b25f3dbe3a9e0974b03ba2829aad20778105c3ad522"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.376741 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9vwq6"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.376928 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" event={"ID":"e7a8152f-0346-4350-a56e-af6018afe93b","Type":"ContainerStarted","Data":"7508abf782bf603846f2ef56f7b450c9c91ff04525da33b7237a63a4185a70c4"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.395662 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-mng7v" podStartSLOduration=131.395647278 podStartE2EDuration="2m11.395647278s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:46.392491656 +0000 UTC m=+149.565849076" watchObservedRunningTime="2025-12-03 07:29:46.395647278 +0000 UTC m=+149.569004678"
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.396513 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.400807 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.412460 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:46.91242866 +0000 UTC m=+150.085786060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.415382 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.421120 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-69zqk" event={"ID":"14511212-9011-47d9-9ecc-2554128b946d","Type":"ContainerStarted","Data":"9494cb2feff7cd774af9d94f5016deb7cb6bc916697cac1172790bf75c3f7d4d"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.442107 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-sjms6" podStartSLOduration=132.442087985 podStartE2EDuration="2m12.442087985s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:46.440663798 +0000 UTC m=+149.614021198" watchObservedRunningTime="2025-12-03 07:29:46.442087985 +0000 UTC m=+149.615445385"
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.451814 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" event={"ID":"5fb67397-5138-46d4-9a7f-ec95a9cee2b7","Type":"ContainerStarted","Data":"69f12dcf5b93bdda55cfacf8e41deaa174bbaa3a8c0e434f8f62c1e3a043417c"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.468153 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xrwls"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.472245 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" event={"ID":"cb685d41-c42f-4f5f-9639-86691091c485","Type":"ContainerStarted","Data":"2420a3f178a4efe19b366a21f9863cdf409e627c9614f2d04076586df90d9f51"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.482118 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" event={"ID":"2f5e688a-098e-44eb-be54-6a05aa962aeb","Type":"ContainerStarted","Data":"eb577277cff6dec92f28c5b2a42b65c037a583a661454d77411c891ed6c71f90"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.483486 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ded9c4cbe320c0f558db337885c642d9584638d7b40c048bf29c6e25a5cb821f"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.484127 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" event={"ID":"44e16d27-e50e-4140-a860-b876365c09ca","Type":"ContainerStarted","Data":"cf19695eaeaac56fb48caa451866b76e35780ac6f4beef0a6acb2e466cc1e522"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.484881 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" event={"ID":"80eef4fa-3155-4682-8073-82f26a7eb519","Type":"ContainerStarted","Data":"24c88fa0deef46afea7a458ebb68e4697fec7bfcbac3350267289e8563fdab8d"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.484907 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" event={"ID":"80eef4fa-3155-4682-8073-82f26a7eb519","Type":"ContainerStarted","Data":"eaf68dcf135c7e13bdddaf29d01862e1059127e63f2dcadaeec892e417de1ce8"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.485516 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" event={"ID":"691c61b7-de0d-49cb-b8c2-9148cc2b8167","Type":"ContainerStarted","Data":"dd4442faba08f1216619d898be407b3d50790edc223b2a180b3bc198a1561970"}
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.522223 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.523371 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.023357279 +0000 UTC m=+150.196714679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.565126 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.598836 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-bl9ht" podStartSLOduration=132.598815334 podStartE2EDuration="2m12.598815334s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:46.57225583 +0000 UTC m=+149.745613240" watchObservedRunningTime="2025-12-03 07:29:46.598815334 +0000 UTC m=+149.772172735"
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.605725 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-477tn"]
Dec 03 07:29:46 crc kubenswrapper[4612]: W1203 07:29:46.613338 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod749db599_e347_4a7b_9ff8_9c33514ee64a.slice/crio-d23d4c0918783eef9256763d358795aa4cd356fe11027915551f13b99f98e11f WatchSource:0}: Error finding container d23d4c0918783eef9256763d358795aa4cd356fe11027915551f13b99f98e11f: Status 404 returned error can't find the container with id d23d4c0918783eef9256763d358795aa4cd356fe11027915551f13b99f98e11f
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.625519 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.625971 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.125927863 +0000 UTC m=+150.299285263 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.726211 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.726368 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.226346201 +0000 UTC m=+150.399703601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.726537 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.726856 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.226848024 +0000 UTC m=+150.400205424 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.735134 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.739873 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.766782 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.819327 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm"]
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.828080 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.828587 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.328572026 +0000 UTC m=+150.501929426 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.840471 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m"]
Dec 03 07:29:46 crc kubenswrapper[4612]: W1203 07:29:46.849721 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod166fff6a_f5bb_4675_86cb_1be1c8b5ed7a.slice/crio-22c3533c89cb37c38ac00ec391b28f7051cd15ef26dcb9e28f636d9df76e862d WatchSource:0}: Error finding container 22c3533c89cb37c38ac00ec391b28f7051cd15ef26dcb9e28f636d9df76e862d: Status 404 returned error can't find the container with id 22c3533c89cb37c38ac00ec391b28f7051cd15ef26dcb9e28f636d9df76e862d
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.924465 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-mng7v"
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.938672 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:46 crc kubenswrapper[4612]: E1203 07:29:46.944962 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.444930075 +0000 UTC m=+150.618287475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.960416 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 07:29:46 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld
Dec 03 07:29:46 crc kubenswrapper[4612]: [+]process-running ok
Dec 03 07:29:46 crc kubenswrapper[4612]: healthz check failed
Dec 03 07:29:46 crc kubenswrapper[4612]: I1203 07:29:46.960474 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.024135 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.046421 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.047118 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.547098728 +0000 UTC m=+150.720456128 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.128240 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.132398 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48rpf"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.138332 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.138393 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.149063 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.149417 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.649405725 +0000 UTC m=+150.822763125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.174066 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.176008 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ft54k"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.245489 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.263671 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.264046 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.76403036 +0000 UTC m=+150.937387760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.276165 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c2f7j"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.299661 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2z2pw"]
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.372198 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.373216 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.873204674 +0000 UTC m=+151.046562074 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: W1203 07:29:47.441193 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode042ad85_b69e_4239_a7eb_c8da5b0fefe8.slice/crio-29d002f236becb75ecc19b74118ab63c909839068055e2bc0520a7ef2fae615f WatchSource:0}: Error finding container 29d002f236becb75ecc19b74118ab63c909839068055e2bc0520a7ef2fae615f: Status 404 returned error can't find the container with id 29d002f236becb75ecc19b74118ab63c909839068055e2bc0520a7ef2fae615f
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.474774 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.475133 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:47.975119831 +0000 UTC m=+151.148477231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.544841 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" event={"ID":"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a","Type":"ContainerStarted","Data":"22c3533c89cb37c38ac00ec391b28f7051cd15ef26dcb9e28f636d9df76e862d"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.588638 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.589010 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.088991336 +0000 UTC m=+151.262348736 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.629193 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" event={"ID":"80eef4fa-3155-4682-8073-82f26a7eb519","Type":"ContainerStarted","Data":"4f6c6fb3b0779fefd3364e2a0755dc8177dca6b74388fe204f4eeb726834e143"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.639442 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" event={"ID":"3827c500-45e5-47ad-823b-8b709dc59b23","Type":"ContainerStarted","Data":"f4b6cef527d13ec8960b7ed12cb21abf91431b243b693698e7d5d9b00c088db6"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.676481 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" event={"ID":"691c61b7-de0d-49cb-b8c2-9148cc2b8167","Type":"ContainerStarted","Data":"c7d2be3be89387ef129a0219f04a4b523dc1d49613d89c31678b40ac2255992d"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.677564 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-b2z4s"
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.683067 4612 patch_prober.go:28] interesting pod/console-operator-58897d9998-b2z4s container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body=
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.683117 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" podUID="691c61b7-de0d-49cb-b8c2-9148cc2b8167" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused"
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.689396 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.690664 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.190650206 +0000 UTC m=+151.364007596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.691165 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" event={"ID":"0a9d1713-389c-4010-b725-3e51fbd8750d","Type":"ContainerStarted","Data":"519239fab7f029861b009876fff0f14082ecf20e9afe0e395fb28c500040b1c0"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.714250 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" event={"ID":"4d345e4d-bef1-4c45-89bd-f30b45165dd2","Type":"ContainerStarted","Data":"bafa70d381ecb8ba4f6fd3be950d883b28ef8b9f1eee03cfce4bad1ea80580c1"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.740045 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"803cc396a3476c8bfbfddb1fde651ec2844ab4d7cfbb3a22b1ca984a8929a310"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.773256 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" event={"ID":"cb685d41-c42f-4f5f-9639-86691091c485","Type":"ContainerStarted","Data":"336a7ab8dc4bce3f1f34c6340a52802def5ea03c3e0547c8478abb509671d1b6"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.794800 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.796010 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.295997472 +0000 UTC m=+151.469354872 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.871779 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c2f7j" event={"ID":"e042ad85-b69e-4239-a7eb-c8da5b0fefe8","Type":"ContainerStarted","Data":"29d002f236becb75ecc19b74118ab63c909839068055e2bc0520a7ef2fae615f"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.896417 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 07:29:47 crc kubenswrapper[4612]: E1203 07:29:47.897654 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.397635002 +0000 UTC m=+151.570992402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.922243 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 07:29:47 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld
Dec 03 07:29:47 crc kubenswrapper[4612]: [+]process-running ok
Dec 03 07:29:47 crc kubenswrapper[4612]: healthz check failed
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.922294 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.932720 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" event={"ID":"5648b2e6-22d6-4a63-b4bd-c28961ef8511","Type":"ContainerStarted","Data":"2e77f5a6b92e9eaaad3f7a91bc91065ce6dd074833078f255fdb73a9ca66fa67"}
Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.967489 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" event={"ID":"0ca7acd8-cd3c-4372-ad92-56d142972141","Type":"ContainerStarted","Data":"8b7b29069d4e11a36e8dff71ac96001a73eba597ea6dbbeeead8e1f230ecf2c0"}
Dec 03 07:29:47 crc
kubenswrapper[4612]: I1203 07:29:47.995660 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" event={"ID":"15002e9b-936b-438d-b53b-dc8764c9dea3","Type":"ContainerStarted","Data":"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"} Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.996374 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:47 crc kubenswrapper[4612]: I1203 07:29:47.999471 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.004908 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.504888966 +0000 UTC m=+151.678246366 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.043434 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" event={"ID":"dcf07602-ffde-48e4-b15e-ff3a08779244","Type":"ContainerStarted","Data":"7914ca0d41425c7d8b43f7130cd1cc6675179347d85957c2a0180a2c0e9b75fb"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.055263 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" event={"ID":"749db599-e347-4a7b-9ff8-9c33514ee64a","Type":"ContainerStarted","Data":"d23d4c0918783eef9256763d358795aa4cd356fe11027915551f13b99f98e11f"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.077391 4612 generic.go:334] "Generic (PLEG): container finished" podID="0bf79614-f448-4a49-bbc7-49da6763842f" containerID="63296e709b081155361f04021f15550113b451122105895f422518af8e1dabfc" exitCode=0 Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.077486 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" event={"ID":"0bf79614-f448-4a49-bbc7-49da6763842f","Type":"ContainerDied","Data":"63296e709b081155361f04021f15550113b451122105895f422518af8e1dabfc"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.103529 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.104586 4612 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.604567636 +0000 UTC m=+151.777925036 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.106459 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" event={"ID":"04bcb431-3e3f-46ab-be09-7f8299ba2ea3","Type":"ContainerStarted","Data":"8fca108fdddab0fd80067f00db8556b82db4de58509fda6f721369d9befd0fab"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.117104 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" event={"ID":"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2","Type":"ContainerStarted","Data":"3b7b8f253396dc790f1f0cfd9c0956dbe0b49f5f6632bb3741963f08e03daa4a"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.128573 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" event={"ID":"2452bbf0-39ec-4e69-bf8d-62062d801e43","Type":"ContainerStarted","Data":"009ed0cb4af316edc100eff682cdcc8710371f823545a34ffbb7a36067b04d40"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.139291 4612 generic.go:334] "Generic (PLEG): container finished" podID="44e16d27-e50e-4140-a860-b876365c09ca" containerID="b9ad8028ebb0d587f99280bffe3975554aea5c3593d2e842ed10b64022346682" exitCode=0 Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.140423 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" event={"ID":"44e16d27-e50e-4140-a860-b876365c09ca","Type":"ContainerDied","Data":"b9ad8028ebb0d587f99280bffe3975554aea5c3593d2e842ed10b64022346682"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.159168 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" event={"ID":"69d3e05f-20c9-47b9-a40d-29df9a8e1105","Type":"ContainerStarted","Data":"cebaebc833df93a7d4b366cc0cfd47786c6ae10756d6d5310e3681645b42da77"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.170969 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" event={"ID":"df610433-e46b-4098-9b66-0fbf5a28899f","Type":"ContainerStarted","Data":"0313b89a01b1e88c497bb0987bf04b83339efe89aefc5300569173f691408c19"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.171011 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" event={"ID":"df610433-e46b-4098-9b66-0fbf5a28899f","Type":"ContainerStarted","Data":"61f4c445d9e0921205a918e4063d911dd088cc8604c41ca7bd488b4954d17bef"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.190017 4612 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-console/console-f9d7485db-477tn" event={"ID":"6bae73b0-37ed-4404-935d-c9afce883fd2","Type":"ContainerStarted","Data":"445cda6a5ab58a8e694b61f2c55ee3339701c6da52f4b13aaf91892faa11c399"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.205869 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.209720 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.709705386 +0000 UTC m=+151.883062786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.237107 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" event={"ID":"44f8faad-fba1-499c-8115-e943359cf8b5","Type":"ContainerStarted","Data":"f10074651e7499cdb3f5188de2b1b92cbb52a6e35bd9d6d6ccc31606ef02d6a6"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.255472 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.277128 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" event={"ID":"db28a622-4b74-49e9-bd91-6f2a253583fe","Type":"ContainerStarted","Data":"94b6758d1701981d146b623ee271c21bf838460fdb7f36f5e73ea6a00174cd88"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.310551 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.311596 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.811579801 +0000 UTC m=+151.984937201 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.319002 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-78pcx" podStartSLOduration=133.318987692 podStartE2EDuration="2m13.318987692s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.271295763 +0000 UTC m=+151.444653173" watchObservedRunningTime="2025-12-03 07:29:48.318987692 +0000 UTC m=+151.492345092" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.328990 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" event={"ID":"44eb6f69-caac-40de-ae3c-23755e409aba","Type":"ContainerStarted","Data":"e127e08f0f4dbf381c65e6d564c9c204473708e2dcc588b7359717f8a708167d"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.345336 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" event={"ID":"84f9b984-d4ee-44e0-8d47-c7abf063eafd","Type":"ContainerStarted","Data":"ad119a49d63c8bdb079b30ab0180134a33c537eb47b16d11006dc73881474216"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.347061 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" event={"ID":"4437c69a-44f5-4e28-a3a1-71efc015d6a7","Type":"ContainerStarted","Data":"982694c29b49612e7c1b9770f34ec87996fa5109ec5ba46593fd32fd08768b5b"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.348021 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" event={"ID":"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79","Type":"ContainerStarted","Data":"608093dd99e9fc1462141464ea4eb2a38ffc2b831cb799baf16502343417989b"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.355069 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" event={"ID":"72335a3a-bb68-48fc-aee8-833a73ea5991","Type":"ContainerStarted","Data":"7fec385f9a2cbc6633043a5f6bef12327183d9d4cabc6e59907ccb1e8189817f"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.383464 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-69zqk" event={"ID":"14511212-9011-47d9-9ecc-2554128b946d","Type":"ContainerStarted","Data":"91bf5711c99d84266aed1df5d02a5cc3aecdfe15022b57bfec2675446b4d5b1b"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.392174 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2z2pw" event={"ID":"f42d1c6a-b297-4145-8fe6-e42a1a0114c5","Type":"ContainerStarted","Data":"aaa8fbd6e0a17d9a9310ac40e8dc32e488134fe09e4253d2364cc2e9cdabf09a"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.397278 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" event={"ID":"5fb67397-5138-46d4-9a7f-ec95a9cee2b7","Type":"ContainerStarted","Data":"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.398616 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.414786 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5h2t4" podStartSLOduration=133.414767461 podStartE2EDuration="2m13.414767461s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.319394433 +0000 UTC m=+151.492751853" watchObservedRunningTime="2025-12-03 07:29:48.414767461 +0000 UTC m=+151.588124881" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.417934 4612 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-tznzj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" start-of-body= Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.418004 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.418864 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.420322 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:48.920308034 +0000 UTC m=+152.093665434 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.420540 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" podStartSLOduration=134.42052371 podStartE2EDuration="2m14.42052371s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.414402602 +0000 UTC m=+151.587760012" watchObservedRunningTime="2025-12-03 07:29:48.42052371 +0000 UTC m=+151.593881110" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.427224 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"05b497d9d7b73a0caf7cec1810f3eb438a350b7ee6212784f053b2de01f33ea2"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.427806 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.471212 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" event={"ID":"eeb0f310-3564-404e-aeb5-237cc9267fed","Type":"ContainerStarted","Data":"2d981ae5ba242e5acf78bbd3524962905e94e83ee19d686fc2b31a1ccc183527"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.471928 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-pdlw9" podStartSLOduration=133.471913284 podStartE2EDuration="2m13.471913284s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.471641787 +0000 UTC m=+151.644999187" watchObservedRunningTime="2025-12-03 07:29:48.471913284 +0000 UTC m=+151.645270684" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.500148 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" event={"ID":"943b34dd-b66a-45e0-919a-57b22def2aa6","Type":"ContainerStarted","Data":"2e35285b38779af54248dff8005b140bd81688a6f4a029129fe30670637a5c48"} Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.520535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.521780 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.021765109 +0000 UTC m=+152.195122509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.546489 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" podStartSLOduration=134.546474016 podStartE2EDuration="2m14.546474016s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.546060445 +0000 UTC m=+151.719417845" watchObservedRunningTime="2025-12-03 07:29:48.546474016 +0000 UTC m=+151.719831416" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.624792 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.627562 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.127550566 +0000 UTC m=+152.300907966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.664900 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" podStartSLOduration=133.664880718 podStartE2EDuration="2m13.664880718s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.662038085 +0000 UTC m=+151.835395505" watchObservedRunningTime="2025-12-03 07:29:48.664880718 +0000 UTC m=+151.838238128" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.693460 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gcf8d" podStartSLOduration=134.693446924 podStartE2EDuration="2m14.693446924s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.689568414 +0000 UTC m=+151.862925814" watchObservedRunningTime="2025-12-03 07:29:48.693446924 +0000 UTC m=+151.866804324" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.730526 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.731026 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.230930541 +0000 UTC m=+152.404287941 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.792291 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8764s" podStartSLOduration=133.792273682 podStartE2EDuration="2m13.792273682s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.732979453 +0000 UTC m=+151.906336863" watchObservedRunningTime="2025-12-03 07:29:48.792273682 +0000 UTC m=+151.965631082" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.834762 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.835517 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.335501306 +0000 UTC m=+152.508858706 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.849226 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-bl5ck" podStartSLOduration=133.849207119 podStartE2EDuration="2m13.849207119s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.846628103 +0000 UTC m=+152.019985503" watchObservedRunningTime="2025-12-03 07:29:48.849207119 +0000 UTC m=+152.022564519" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.928266 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:48 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:48 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:48 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.928320 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.936203 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:48 crc kubenswrapper[4612]: E1203 07:29:48.936499 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.436484519 +0000 UTC m=+152.609841919 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:48 crc kubenswrapper[4612]: I1203 07:29:48.965815 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" podStartSLOduration=134.965786294 podStartE2EDuration="2m14.965786294s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:48.957225943 +0000 UTC m=+152.130583343" watchObservedRunningTime="2025-12-03 07:29:48.965786294 +0000 UTC m=+152.139143714" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.038295 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.038793 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.538771385 +0000 UTC m=+152.712128785 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.092878 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-69zqk" podStartSLOduration=8.09286277 podStartE2EDuration="8.09286277s" podCreationTimestamp="2025-12-03 07:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.091894005 +0000 UTC m=+152.265251405" watchObservedRunningTime="2025-12-03 07:29:49.09286277 +0000 UTC m=+152.266220170" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.095667 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" podStartSLOduration=134.095658002 podStartE2EDuration="2m14.095658002s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.02885946 +0000 UTC m=+152.202216870" watchObservedRunningTime="2025-12-03 07:29:49.095658002 +0000 UTC m=+152.269015402" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.140107 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.140508 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.640487007 +0000 UTC m=+152.813844407 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.242092 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.242430 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.742418675 +0000 UTC m=+152.915776075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.348519 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.348670 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.848648383 +0000 UTC m=+153.022005783 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.349150 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.349502 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.849487744 +0000 UTC m=+153.022845144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.450525 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.451154 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:49.951135054 +0000 UTC m=+153.124492454 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.533739 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" event={"ID":"943b34dd-b66a-45e0-919a-57b22def2aa6","Type":"ContainerStarted","Data":"0fc8240cbb0674ffd33bf2c1e0c400b1d60166239941b09778493155e5de1f30"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.553990 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.554357 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.054341335 +0000 UTC m=+153.227698735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.573187 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c2f7j" event={"ID":"e042ad85-b69e-4239-a7eb-c8da5b0fefe8","Type":"ContainerStarted","Data":"c385f3b271ca6ac35150381ce486e61194c308ce1c7abe9ee3c70d01235cac69"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.600197 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" event={"ID":"166fff6a-f5bb-4675-86cb-1be1c8b5ed7a","Type":"ContainerStarted","Data":"693ab60acf096c25f0fdf5632d8871bc7e3defaa7e2ccb480461536ffbf63c0b"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.603571 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" event={"ID":"44f8faad-fba1-499c-8115-e943359cf8b5","Type":"ContainerStarted","Data":"29c1fab684494f7013e981e8c1a38572678e68aae335b339a9ff8a107fa61554"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.613753 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k6tc4" event={"ID":"2b94e895-312e-493f-9720-82e1bffabf02","Type":"ContainerStarted","Data":"fb712138687613dfb5da01979b14de55ebf724fdeaf6497a999fa319d3673378"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.614485 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.616436 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dzht7" event={"ID":"db28a622-4b74-49e9-bd91-6f2a253583fe","Type":"ContainerStarted","Data":"0b086a602535bd26b6b5dce361e88fd4ba692df560918dd6e5613ea141c577ea"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.628192 4612 patch_prober.go:28] interesting pod/downloads-7954f5f757-k6tc4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.628250 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k6tc4" podUID="2b94e895-312e-493f-9720-82e1bffabf02" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.632602 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" event={"ID":"0a9d1713-389c-4010-b725-3e51fbd8750d","Type":"ContainerStarted","Data":"eb3034456614a776c289093a9ad443a76c270c47c063bf5c547161b614a498c2"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.633153 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.634601 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" event={"ID":"72335a3a-bb68-48fc-aee8-833a73ea5991","Type":"ContainerStarted","Data":"0232abf909f4926b1133c93c3968a586d41c0f36e67c156f3ffcb005f55f0825"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.643381 4612 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r42pw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.643435 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.649380 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5mk52" podStartSLOduration=134.649359424 podStartE2EDuration="2m14.649359424s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.646306345 +0000 UTC m=+152.819663745" watchObservedRunningTime="2025-12-03 07:29:49.649359424 +0000 UTC m=+152.822716824" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.656966 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.658107 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.158087179 +0000 UTC m=+153.331444579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.658633 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-477tn" event={"ID":"6bae73b0-37ed-4404-935d-c9afce883fd2","Type":"ContainerStarted","Data":"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.665970 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" event={"ID":"5f6b8cd6-7786-458e-93a7-7c4dd363f1c2","Type":"ContainerStarted","Data":"976fa3bb53eeda629b0a3d9f9f95897e8259b15c2a6f8f8b3a7f98451a13e6ff"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.666924 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.678168 4612 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-mtcm2 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.678232 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" podUID="5f6b8cd6-7786-458e-93a7-7c4dd363f1c2" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.687727 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bxgnq" podStartSLOduration=134.687706742 podStartE2EDuration="2m14.687706742s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.68761402 +0000 UTC m=+152.860971430" watchObservedRunningTime="2025-12-03 07:29:49.687706742 +0000 UTC m=+152.861064142" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.706157 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2z2pw" 
event={"ID":"f42d1c6a-b297-4145-8fe6-e42a1a0114c5","Type":"ContainerStarted","Data":"b4a4b64399705cb66b6906ea6857a79621582153d44b4702289f75e4f1b38778"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.724212 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" event={"ID":"3827c500-45e5-47ad-823b-8b709dc59b23","Type":"ContainerStarted","Data":"1b31a1c3ed5ed52f5bebfc5a4ca4e29858def21764de8ae900e8a5587f4df8f4"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.734078 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" event={"ID":"eeb0f310-3564-404e-aeb5-237cc9267fed","Type":"ContainerStarted","Data":"79cb3bf1a4410d333309a95dd8c83955cf6c916bd48c61b0cb6a36a79ec8c54f"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.762682 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.769325 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.269309646 +0000 UTC m=+153.442667036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.777022 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" event={"ID":"5648b2e6-22d6-4a63-b4bd-c28961ef8511","Type":"ContainerStarted","Data":"435ef29fa168c61a8121f2ffc2dbf2bfcea78f987e6a521e2e1be73ab4460dff"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.777849 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.778831 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" podStartSLOduration=134.778818031 podStartE2EDuration="2m14.778818031s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.722501119 +0000 UTC m=+152.895858529" watchObservedRunningTime="2025-12-03 07:29:49.778818031 +0000 UTC m=+152.952175431" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.786765 4612 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mc86r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.786817 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" podUID="5648b2e6-22d6-4a63-b4bd-c28961ef8511" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.787613 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-k6tc4" podStartSLOduration=135.779017876 podStartE2EDuration="2m15.779017876s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.774248703 +0000 UTC m=+152.947606103" watchObservedRunningTime="2025-12-03 07:29:49.779017876 +0000 UTC m=+152.952375286" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.804282 4612 generic.go:334] "Generic (PLEG): container finished" podID="04bcb431-3e3f-46ab-be09-7f8299ba2ea3" containerID="15e6e58a312e0126d68601404a59d9a598128ef7dd9c385173a4dadfb4cdb062" exitCode=0 Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.804377 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" event={"ID":"04bcb431-3e3f-46ab-be09-7f8299ba2ea3","Type":"ContainerStarted","Data":"e41d12e04e379ea6b6e9c15831099d2410ef079859096e9fa4d3c7d760afd47a"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.804404 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" event={"ID":"04bcb431-3e3f-46ab-be09-7f8299ba2ea3","Type":"ContainerDied","Data":"15e6e58a312e0126d68601404a59d9a598128ef7dd9c385173a4dadfb4cdb062"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.804984 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.816494 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" podStartSLOduration=134.816475811 podStartE2EDuration="2m14.816475811s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.810480307 +0000 UTC m=+152.983837707" watchObservedRunningTime="2025-12-03 07:29:49.816475811 +0000 UTC m=+152.989833211" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.846557 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" event={"ID":"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79","Type":"ContainerStarted","Data":"a0b9157c7d9f7d6bf105434442353b95fc8e89fdd03993e6722697dddccd2684"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.846608 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" event={"ID":"b8ed0ad2-dcc9-459d-a0a8-7d854a591d79","Type":"ContainerStarted","Data":"ddb0389b1f4fe73de51f16ce44d17ba06ce47fbf584ac4ea839355e43d93b3b6"} Dec 03 
07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.852042 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-l2vxn" podStartSLOduration=134.852026598 podStartE2EDuration="2m14.852026598s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.850129759 +0000 UTC m=+153.023487159" watchObservedRunningTime="2025-12-03 07:29:49.852026598 +0000 UTC m=+153.025383998" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.863589 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.864788 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.364772696 +0000 UTC m=+153.538130096 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.870254 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" event={"ID":"69d3e05f-20c9-47b9-a40d-29df9a8e1105","Type":"ContainerStarted","Data":"a5adf9d2c97b8801f729a8c4967976b59a6b9080a04d70d6aca169e93048b8f0"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.872146 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" event={"ID":"749db599-e347-4a7b-9ff8-9c33514ee64a","Type":"ContainerStarted","Data":"e3aa13ff474c9ccfbc7f8019f61b664aa791ee2b9f9b965e5b29654b66e6c99a"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.872980 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" event={"ID":"4437c69a-44f5-4e28-a3a1-71efc015d6a7","Type":"ContainerStarted","Data":"b08cc544e3319cedf733fd20a6e1159a8c4005feb64dc0a1a5f9953a0e9d0d6c"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.891282 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-477tn" podStartSLOduration=135.891266029 podStartE2EDuration="2m15.891266029s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.889429702 +0000 UTC m=+153.062787112" watchObservedRunningTime="2025-12-03 07:29:49.891266029 +0000 UTC m=+153.064623439" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.918344 4612 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" event={"ID":"e7a8152f-0346-4350-a56e-af6018afe93b","Type":"ContainerStarted","Data":"f4e547e1e0a19e0912f9d5e0b7e57134ae553bceb6d9f40e687c9570c24176c9"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.924010 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:49 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:49 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:49 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.924063 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.935325 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" event={"ID":"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7","Type":"ContainerStarted","Data":"f8726978b28c542dc08c1b4ac327b0d66fb49ded0c986bf63b452e75ed05dcb1"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.937814 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-2z2pw" podStartSLOduration=8.937799629 podStartE2EDuration="8.937799629s" podCreationTimestamp="2025-12-03 07:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.934472093 +0000 UTC m=+153.107829493" watchObservedRunningTime="2025-12-03 07:29:49.937799629 +0000 UTC m=+153.111157039" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.953277 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" event={"ID":"44eb6f69-caac-40de-ae3c-23755e409aba","Type":"ContainerStarted","Data":"0ac02b9ff95ccc87871e630785170bcdb9982117697cdd5f379ba18b8344c6b7"} Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.953345 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.964231 4612 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s2lkt container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.964296 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" podUID="44eb6f69-caac-40de-ae3c-23755e409aba" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.967750 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:49 crc kubenswrapper[4612]: E1203 07:29:49.970499 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.470486841 +0000 UTC m=+153.643844241 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:49 crc kubenswrapper[4612]: I1203 07:29:49.986478 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bpbwj" podStartSLOduration=134.986464993 podStartE2EDuration="2m14.986464993s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:49.985412306 +0000 UTC m=+153.158769706" watchObservedRunningTime="2025-12-03 07:29:49.986464993 +0000 UTC m=+153.159822393" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.010269 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.067345 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-b2z4s" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.069331 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.069675 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.569650317 +0000 UTC m=+153.743007717 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.070639 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.092205 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.592190818 +0000 UTC m=+153.765548218 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.094079 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" podStartSLOduration=136.094044346 podStartE2EDuration="2m16.094044346s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.037584151 +0000 UTC m=+153.210941551" watchObservedRunningTime="2025-12-03 07:29:50.094044346 +0000 UTC m=+153.267401756" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.111689 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d2tjw" podStartSLOduration=135.11167113 podStartE2EDuration="2m15.11167113s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.069837242 +0000 UTC m=+153.243194642" watchObservedRunningTime="2025-12-03 07:29:50.11167113 +0000 UTC m=+153.285028530" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.127741 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d9ftc" podStartSLOduration=136.127718954 podStartE2EDuration="2m16.127718954s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.112544693 +0000 UTC m=+153.285902093" watchObservedRunningTime="2025-12-03 07:29:50.127718954 +0000 UTC m=+153.301076354" Dec 03 07:29:50 crc 
kubenswrapper[4612]: I1203 07:29:50.173728 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.174186 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.674169761 +0000 UTC m=+153.847527161 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.193463 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" podStartSLOduration=135.193446408 podStartE2EDuration="2m15.193446408s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.168401793 +0000 UTC m=+153.341759363" watchObservedRunningTime="2025-12-03 07:29:50.193446408 +0000 UTC m=+153.366803808" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.193656 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-ft54k" podStartSLOduration=135.193651103 podStartE2EDuration="2m15.193651103s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.191541629 +0000 UTC m=+153.364899049" watchObservedRunningTime="2025-12-03 07:29:50.193651103 +0000 UTC m=+153.367008503" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.277656 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.277993 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.777982247 +0000 UTC m=+153.951339647 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.381188 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" podStartSLOduration=135.381173816 podStartE2EDuration="2m15.381173816s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:50.254758109 +0000 UTC m=+153.428115509" watchObservedRunningTime="2025-12-03 07:29:50.381173816 +0000 UTC m=+153.554531216" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.382396 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.382791 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.882775887 +0000 UTC m=+154.056133277 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.484855 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.485323 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:50.98530594 +0000 UTC m=+154.158663340 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.585756 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.585854 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.085838811 +0000 UTC m=+154.259196211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.586045 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.586306 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.086299533 +0000 UTC m=+154.259656933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.688265 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.688443 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.188402825 +0000 UTC m=+154.361760225 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.688742 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.689014 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.189002481 +0000 UTC m=+154.362359881 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.790253 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.790436 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.290406344 +0000 UTC m=+154.463763744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.790781 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.791138 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.291131083 +0000 UTC m=+154.464488483 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.891826 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.892042 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.392007163 +0000 UTC m=+154.565364573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.892092 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.892510 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.392495516 +0000 UTC m=+154.565852916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.919519 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:50 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:50 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:50 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.919569 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.957233 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" event={"ID":"749db599-e347-4a7b-9ff8-9c33514ee64a","Type":"ContainerStarted","Data":"7015ea81c4923156f81a5dbf196d74e9251b7b4da93e7d620fb5957abcf67436"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.958406 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" event={"ID":"84f9b984-d4ee-44e0-8d47-c7abf063eafd","Type":"ContainerStarted","Data":"089830d10ab79ac6e24ec704ab830da53d8f733b0b7535c0d013b2a7ab8359ea"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.959625 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" event={"ID":"edb4cd28-ef00-4ba5-b8c4-a9d4f91a2ac7","Type":"ContainerStarted","Data":"7da01d62fd52f47ee84760d83ccaad0a327bacdfc8b6a740db8487475c9c7376"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.962168 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" event={"ID":"44f8faad-fba1-499c-8115-e943359cf8b5","Type":"ContainerStarted","Data":"770d12680c25f3e62d4dc027611712fc27caf96b6f47e9d07080ba6415823f68"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.962510 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.963598 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" event={"ID":"3827c500-45e5-47ad-823b-8b709dc59b23","Type":"ContainerStarted","Data":"d45654a0e1b235bf5c4619f4827b5f504c067606bb7d87f4e541e8985a7cdb5e"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.965158 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" event={"ID":"943b34dd-b66a-45e0-919a-57b22def2aa6","Type":"ContainerStarted","Data":"25c87bd1418c30c419ac2e88af9698a3c51495314fdaa5d8dc3a97e64036a796"} Dec 03 
07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.966979 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" event={"ID":"0bf79614-f448-4a49-bbc7-49da6763842f","Type":"ContainerStarted","Data":"88bea7692b37078578b8c2b00588bcec12cfba2145f84d87d26ed75c4e88a99d"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.967034 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" event={"ID":"0bf79614-f448-4a49-bbc7-49da6763842f","Type":"ContainerStarted","Data":"b78d48acb6f4d7116d6943a107a083223f164230186373bcb1db7be9de3f580c"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.968651 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" event={"ID":"44e16d27-e50e-4140-a860-b876365c09ca","Type":"ContainerStarted","Data":"f8f02670d2ab06452d1be06f0410e2c43447d7c0840c8db5b26381f00666caf1"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.970414 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c2f7j" event={"ID":"e042ad85-b69e-4239-a7eb-c8da5b0fefe8","Type":"ContainerStarted","Data":"85f8b6894e9eb2d7139bcc571afd788f915060d3305015c6a7546dc94fa31aae"} Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.970439 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.972494 4612 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r42pw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.972504 4612 patch_prober.go:28] interesting pod/downloads-7954f5f757-k6tc4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.972528 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.972548 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k6tc4" podUID="2b94e895-312e-493f-9720-82e1bffabf02" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.980384 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s2lkt" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.983257 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9vwq6" podStartSLOduration=135.983242165 podStartE2EDuration="2m15.983242165s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 07:29:50.982878255 +0000 UTC m=+154.156235655" watchObservedRunningTime="2025-12-03 07:29:50.983242165 +0000 UTC m=+154.156599565" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.988623 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mtcm2" Dec 03 07:29:50 crc kubenswrapper[4612]: I1203 07:29:50.993263 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:50 crc kubenswrapper[4612]: E1203 07:29:50.993659 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.493639623 +0000 UTC m=+154.666997023 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.061140 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" podStartSLOduration=136.061123082 podStartE2EDuration="2m16.061123082s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.058988597 +0000 UTC m=+154.232346007" watchObservedRunningTime="2025-12-03 07:29:51.061123082 +0000 UTC m=+154.234480482" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.094981 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.098338 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.598323181 +0000 UTC m=+154.771680581 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.146142 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-7pdzw" podStartSLOduration=136.146125773 podStartE2EDuration="2m16.146125773s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.088872748 +0000 UTC m=+154.262230168" watchObservedRunningTime="2025-12-03 07:29:51.146125773 +0000 UTC m=+154.319483173" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.201880 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6h5hf" podStartSLOduration=136.20186441 podStartE2EDuration="2m16.20186441s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.148217327 +0000 UTC m=+154.321574727" watchObservedRunningTime="2025-12-03 07:29:51.20186441 +0000 UTC m=+154.375221810" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.203090 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.203515 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.703495202 +0000 UTC m=+154.876852602 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.203870 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" podStartSLOduration=137.203862832 podStartE2EDuration="2m17.203862832s" podCreationTimestamp="2025-12-03 07:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.202421764 +0000 UTC m=+154.375779194" watchObservedRunningTime="2025-12-03 07:29:51.203862832 +0000 UTC m=+154.377220222" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.289537 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4sqvm" podStartSLOduration=136.289519379 podStartE2EDuration="2m16.289519379s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.288498603 +0000 UTC m=+154.461856013" watchObservedRunningTime="2025-12-03 07:29:51.289519379 +0000 UTC m=+154.462876779" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.290764 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-c2f7j" podStartSLOduration=10.290758971 podStartE2EDuration="10.290758971s" podCreationTimestamp="2025-12-03 07:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.24492114 +0000 UTC m=+154.418278540" watchObservedRunningTime="2025-12-03 07:29:51.290758971 +0000 UTC m=+154.464116381" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.304613 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.305024 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.805009969 +0000 UTC m=+154.978367379 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.385114 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" podStartSLOduration=136.385092023 podStartE2EDuration="2m16.385092023s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:51.382062525 +0000 UTC m=+154.555419925" watchObservedRunningTime="2025-12-03 07:29:51.385092023 +0000 UTC m=+154.558449443" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.405190 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.405445 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.905414187 +0000 UTC m=+155.078771607 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.405530 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.405924 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:51.90591153 +0000 UTC m=+155.079268990 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.408609 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mc86r" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.506864 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.507094 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.007062687 +0000 UTC m=+155.180420087 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.507148 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.507476 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.007463307 +0000 UTC m=+155.180820697 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.608406 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.608571 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.108545353 +0000 UTC m=+155.281902753 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.608628 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.609122 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.109115347 +0000 UTC m=+155.282472747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.630642 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.710509 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.710663 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.210638284 +0000 UTC m=+155.383995684 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.710871 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.711414 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.211394514 +0000 UTC m=+155.384751974 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.811604 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.811784 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.31173621 +0000 UTC m=+155.485093620 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.812030 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.812366 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.312354086 +0000 UTC m=+155.485711486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.894861 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.896203 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.898998 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.907599 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.913096 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.913262 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.413238006 +0000 UTC m=+155.586595406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.913311 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:51 crc kubenswrapper[4612]: E1203 07:29:51.913646 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.413639207 +0000 UTC m=+155.586996607 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.920696 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:51 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:51 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:51 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.920749 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.975672 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" event={"ID":"84f9b984-d4ee-44e0-8d47-c7abf063eafd","Type":"ContainerStarted","Data":"e68af93e6cf55bf7a06776e599e1e824f2b751f9a9fd40d7e2f4f47886e1659a"} Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.977354 4612 patch_prober.go:28] interesting pod/downloads-7954f5f757-k6tc4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 07:29:51 crc kubenswrapper[4612]: I1203 07:29:51.977418 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k6tc4" podUID="2b94e895-312e-493f-9720-82e1bffabf02" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.015021 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.015187 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.515161783 +0000 UTC m=+155.688519183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.015357 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.015405 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.015438 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt7tx\" (UniqueName: \"kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.015616 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.015952 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.515930763 +0000 UTC m=+155.689288153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.090384 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.091529 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.096709 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.117157 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.117304 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.617279116 +0000 UTC m=+155.790636516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.117421 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt7tx\" (UniqueName: \"kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.117676 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.118327 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.118456 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.125409 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc 
kubenswrapper[4612]: I1203 07:29:52.126271 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.126358 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.626346609 +0000 UTC m=+155.799704009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.160746 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.174848 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt7tx\" (UniqueName: \"kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx\") pod \"certified-operators-nj5zt\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.210515 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.225253 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.225512 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.225566 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.225615 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 07:29:52.725587307 +0000 UTC m=+155.898944707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.225656 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.225774 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.226092 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.72608516 +0000 UTC m=+155.899442560 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.303907 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.304863 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.319046 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.326624 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.326781 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 07:29:52.826751975 +0000 UTC m=+156.000109385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.326836 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.327016 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.327084 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.327145 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.327467 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.827460353 +0000 UTC m=+156.000817753 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.328786 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.329170 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.374611 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54\") pod \"community-operators-88rgv\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.417172 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.428299 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.428457 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.928423896 +0000 UTC m=+156.101781306 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.428551 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.428614 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.428644 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqngr\" (UniqueName: \"kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.428696 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.429195 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:52.929186915 +0000 UTC m=+156.102544315 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.496207 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.497387 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.513205 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.530514 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.530733 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.030695352 +0000 UTC m=+156.204052752 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.530807 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.531028 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.531118 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.531146 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqngr\" (UniqueName: \"kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.531495 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc 
kubenswrapper[4612]: I1203 07:29:52.531539 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.531930 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.031908373 +0000 UTC m=+156.205265853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.562887 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqngr\" (UniqueName: \"kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr\") pod \"certified-operators-mj9vl\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632407 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.632497 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.132483145 +0000 UTC m=+156.305840545 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632435 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632755 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632805 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbrqd\" (UniqueName: \"kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632865 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.632898 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.633197 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.133186554 +0000 UTC m=+156.306543964 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.733563 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.734074 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.734122 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.734169 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbrqd\" (UniqueName: \"kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.734865 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.234850984 +0000 UTC m=+156.408208384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.735300 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.735549 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.803360 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbrqd\" (UniqueName: \"kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd\") pod \"community-operators-9r8gz\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.816351 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.844620 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.844996 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.344983723 +0000 UTC m=+156.518341123 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.938683 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:52 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:52 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:52 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.938733 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.943345 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xrwls" Dec 03 07:29:52 crc kubenswrapper[4612]: I1203 07:29:52.945102 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:52 crc kubenswrapper[4612]: E1203 07:29:52.945417 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.445403941 +0000 UTC m=+156.618761341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.026138 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" event={"ID":"84f9b984-d4ee-44e0-8d47-c7abf063eafd","Type":"ContainerStarted","Data":"680b8e8090d3a724dc963878ac48d2b1f23fc0dfaec3c5792c171d9799f5cc11"} Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.046642 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.047014 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.54700247 +0000 UTC m=+156.720359870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.049825 4612 generic.go:334] "Generic (PLEG): container finished" podID="df610433-e46b-4098-9b66-0fbf5a28899f" containerID="0313b89a01b1e88c497bb0987bf04b83339efe89aefc5300569173f691408c19" exitCode=0 Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.051403 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" event={"ID":"df610433-e46b-4098-9b66-0fbf5a28899f","Type":"ContainerDied","Data":"0313b89a01b1e88c497bb0987bf04b83339efe89aefc5300569173f691408c19"} Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.130137 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.147789 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.148820 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 07:29:53.648803264 +0000 UTC m=+156.822160664 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.175099 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:29:53 crc kubenswrapper[4612]: W1203 07:29:53.211434 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdd62db3_0bae_4037_915e_a339d0ecc7a6.slice/crio-5548a8291416cdb58901d36360bced8fb53d1d39f5b312f95f419138558d73f5 WatchSource:0}: Error finding container 5548a8291416cdb58901d36360bced8fb53d1d39f5b312f95f419138558d73f5: Status 404 returned error can't find the container with id 5548a8291416cdb58901d36360bced8fb53d1d39f5b312f95f419138558d73f5 Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.259851 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.260432 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.760415641 +0000 UTC m=+156.933773041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.315353 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.315382 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.362057 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.362727 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.862712058 +0000 UTC m=+157.036069448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.369160 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.396909 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.397037 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.465447 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.466631 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:53.966612476 +0000 UTC m=+157.139969876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.488387 4612 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.530898 4612 patch_prober.go:28] interesting pod/downloads-7954f5f757-k6tc4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.530957 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k6tc4" podUID="2b94e895-312e-493f-9720-82e1bffabf02" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.531031 4612 patch_prober.go:28] interesting pod/downloads-7954f5f757-k6tc4 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.531047 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-k6tc4" podUID="2b94e895-312e-493f-9720-82e1bffabf02" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.537122 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.566354 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.568262 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.068239945 +0000 UTC m=+157.241597345 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.577128 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.577286 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.597697 4612 patch_prober.go:28] interesting pod/console-f9d7485db-477tn container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.599468 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-477tn" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.665829 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.667973 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.668975 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.168962982 +0000 UTC m=+157.342320382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.771406 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.772210 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.272193493 +0000 UTC m=+157.445550883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.873355 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.873785 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.373770361 +0000 UTC m=+157.547127761 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.916519 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.920975 4612 patch_prober.go:28] interesting pod/apiserver-76f77b778f-9rnzg container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]log ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]etcd ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/generic-apiserver-start-informers ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/max-in-flight-filter ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 03 07:29:53 crc kubenswrapper[4612]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 03 07:29:53 crc kubenswrapper[4612]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/project.openshift.io-projectcache ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/openshift.io-startinformers ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 03 07:29:53 crc kubenswrapper[4612]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 03 07:29:53 crc kubenswrapper[4612]: livez check failed Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.921034 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" podUID="0bf79614-f448-4a49-bbc7-49da6763842f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.922023 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:53 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:53 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:53 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.922202 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.974339 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.974610 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.474584098 +0000 UTC m=+157.647941498 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:53 crc kubenswrapper[4612]: I1203 07:29:53.974871 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:53 crc kubenswrapper[4612]: E1203 07:29:53.975170 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.475159603 +0000 UTC m=+157.648517003 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.057622 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" event={"ID":"84f9b984-d4ee-44e0-8d47-c7abf063eafd","Type":"ContainerStarted","Data":"78bf5ca0fdf439cb291ab228f06092d5770f4e883832d579fbd0f7ee824d6850"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.060723 4612 generic.go:334] "Generic (PLEG): container finished" podID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerID="e2d14af8c81babe99ebfa03ea823b8636787a52e743d820db87decc4a60ea31c" exitCode=0 Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.060766 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerDied","Data":"e2d14af8c81babe99ebfa03ea823b8636787a52e743d820db87decc4a60ea31c"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.060879 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerStarted","Data":"09ed3a57ddd84c7645a832dfe5c2334418af769eb30424ef1e25abf383d72b80"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.062194 4612 generic.go:334] "Generic (PLEG): container finished" podID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerID="727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1" exitCode=0 Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.062300 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerDied","Data":"727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.062375 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerStarted","Data":"6bacb085b613bac8de87ea5bf575c62fc5495d0f1c2c40e32b57515054646723"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.063491 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.066509 4612 generic.go:334] "Generic (PLEG): container finished" podID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerID="d2f2c99e8092bcdceebb3ecaa604de1a372736e3577dd7f208cd44425eed8a24" exitCode=0 Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.066591 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerDied","Data":"d2f2c99e8092bcdceebb3ecaa604de1a372736e3577dd7f208cd44425eed8a24"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.066625 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" 
event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerStarted","Data":"03ad03c541f45ad0d81c9fef88ceb2064c7514ff372814792408ebf5c30ae762"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.068376 4612 generic.go:334] "Generic (PLEG): container finished" podID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerID="d9cdc12edf3c858b6fe7977e958de097ad6ecc5004403a725a3fa87dbdaa375a" exitCode=0 Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.068497 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerDied","Data":"d9cdc12edf3c858b6fe7977e958de097ad6ecc5004403a725a3fa87dbdaa375a"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.069041 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerStarted","Data":"5548a8291416cdb58901d36360bced8fb53d1d39f5b312f95f419138558d73f5"} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.075590 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:54 crc kubenswrapper[4612]: E1203 07:29:54.077102 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.57708223 +0000 UTC m=+157.750439630 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.083432 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2ldkj" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.090610 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-48rpf" podStartSLOduration=13.090594419 podStartE2EDuration="13.090594419s" podCreationTimestamp="2025-12-03 07:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:54.086230626 +0000 UTC m=+157.259588046" watchObservedRunningTime="2025-12-03 07:29:54.090594419 +0000 UTC m=+157.263951819" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.093428 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.094601 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.107392 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.116444 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.177825 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.177889 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.178097 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nmrk\" (UniqueName: \"kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.178148 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: E1203 07:29:54.179040 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.679028858 +0000 UTC m=+157.852386258 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-kjpvq" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.279269 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.279511 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nmrk\" (UniqueName: \"kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.279547 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.279598 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.280098 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.280315 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: E1203 07:29:54.280761 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 07:29:54.78074214 +0000 UTC m=+157.954099540 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.282991 4612 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T07:29:53.488414218Z","Handler":null,"Name":""} Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.287049 4612 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.287159 4612 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.302849 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.332796 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nmrk\" (UniqueName: \"kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk\") pod \"redhat-marketplace-r669p\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.381122 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.393576 4612 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.393613 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.417597 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.477496 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-kjpvq\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.482718 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.492065 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.513849 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.513915 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:29:54 crc kubenswrapper[4612]: E1203 07:29:54.514147 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df610433-e46b-4098-9b66-0fbf5a28899f" containerName="collect-profiles" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.514163 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="df610433-e46b-4098-9b66-0fbf5a28899f" containerName="collect-profiles" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.514267 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="df610433-e46b-4098-9b66-0fbf5a28899f" containerName="collect-profiles" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.515255 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.518333 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.584700 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume\") pod \"df610433-e46b-4098-9b66-0fbf5a28899f\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.584745 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume\") pod \"df610433-e46b-4098-9b66-0fbf5a28899f\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.584797 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czqsq\" (UniqueName: \"kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq\") pod \"df610433-e46b-4098-9b66-0fbf5a28899f\" (UID: \"df610433-e46b-4098-9b66-0fbf5a28899f\") " Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.584999 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.585029 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.585045 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vr4c\" (UniqueName: \"kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.587444 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume" (OuterVolumeSpecName: "config-volume") pod "df610433-e46b-4098-9b66-0fbf5a28899f" (UID: "df610433-e46b-4098-9b66-0fbf5a28899f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.594575 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq" (OuterVolumeSpecName: "kube-api-access-czqsq") pod "df610433-e46b-4098-9b66-0fbf5a28899f" (UID: "df610433-e46b-4098-9b66-0fbf5a28899f"). InnerVolumeSpecName "kube-api-access-czqsq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.596283 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "df610433-e46b-4098-9b66-0fbf5a28899f" (UID: "df610433-e46b-4098-9b66-0fbf5a28899f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.685788 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.685839 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.685855 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vr4c\" (UniqueName: \"kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.686276 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df610433-e46b-4098-9b66-0fbf5a28899f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.686293 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df610433-e46b-4098-9b66-0fbf5a28899f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.686302 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czqsq\" (UniqueName: \"kubernetes.io/projected/df610433-e46b-4098-9b66-0fbf5a28899f-kube-api-access-czqsq\") on node \"crc\" DevicePath \"\"" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.686330 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.687400 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content\") pod \"redhat-marketplace-hvvvp\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.701391 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vr4c\" (UniqueName: \"kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c\") pod \"redhat-marketplace-hvvvp\" (UID: 
\"b11d0417-0152-4c68-871a-b7e40af003e4\") " pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.754359 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.837146 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.924719 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.925881 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:54 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:54 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:54 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:54 crc kubenswrapper[4612]: I1203 07:29:54.925937 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:54 crc kubenswrapper[4612]: W1203 07:29:54.973516 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfaf2b695_863f_4f58_b53a_66aa4addd381.slice/crio-1ce9bc3d7a4ebd233768e2d4f4f8845ef3827b4bfeff54c99af9910cbfa242a7 WatchSource:0}: Error finding container 1ce9bc3d7a4ebd233768e2d4f4f8845ef3827b4bfeff54c99af9910cbfa242a7: Status 404 returned error can't find the container with id 1ce9bc3d7a4ebd233768e2d4f4f8845ef3827b4bfeff54c99af9910cbfa242a7 Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.064463 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.065390 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.068390 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.068817 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.070517 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.075190 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.087297 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.093230 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.097811 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.161441 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.163023 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerStarted","Data":"1ce9bc3d7a4ebd233768e2d4f4f8845ef3827b4bfeff54c99af9910cbfa242a7"} Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.163084 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.164892 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.164925 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh" event={"ID":"df610433-e46b-4098-9b66-0fbf5a28899f","Type":"ContainerDied","Data":"61f4c445d9e0921205a918e4063d911dd088cc8604c41ca7bd488b4954d17bef"} Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.164962 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61f4c445d9e0921205a918e4063d911dd088cc8604c41ca7bd488b4954d17bef" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.193884 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.195797 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.195838 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.195854 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.195880 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2txk\" (UniqueName: \"kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.297330 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.297822 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.297895 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2txk\" (UniqueName: \"kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.298106 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.298147 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.298427 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.299395 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.299962 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.319010 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-n2txk\" (UniqueName: \"kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk\") pod \"redhat-operators-k6trx\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.322702 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.422617 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.439354 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.452471 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:29:55 crc kubenswrapper[4612]: W1203 07:29:55.467284 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb11d0417_0152_4c68_871a_b7e40af003e4.slice/crio-34703deb80cf9acbe12f50abf603df6ee8aa53705c03982a81830e7027561ac4 WatchSource:0}: Error finding container 34703deb80cf9acbe12f50abf603df6ee8aa53705c03982a81830e7027561ac4: Status 404 returned error can't find the container with id 34703deb80cf9acbe12f50abf603df6ee8aa53705c03982a81830e7027561ac4 Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.508166 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.545321 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.545485 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.602666 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.602788 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2ngh\" (UniqueName: \"kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.602818 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.707693 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2ngh\" (UniqueName: \"kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.708098 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.708146 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.708650 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.708928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content\") pod \"redhat-operators-p52h2\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.740805 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2ngh\" (UniqueName: \"kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh\") pod \"redhat-operators-p52h2\" (UID: 
\"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.865983 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.902921 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.909630 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.921775 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:55 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:55 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:55 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:55 crc kubenswrapper[4612]: I1203 07:29:55.921824 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:55 crc kubenswrapper[4612]: W1203 07:29:55.984804 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod782703f2_6f49_436a_9ddb_d9a7d3024d83.slice/crio-bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f WatchSource:0}: Error finding container bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f: Status 404 returned error can't find the container with id bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.215643 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerStarted","Data":"0e38f5cb495b8360f9cacae25c11c6013787d6c475aae0e70762aff181f61552"} Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.226504 4612 generic.go:334] "Generic (PLEG): container finished" podID="b11d0417-0152-4c68-871a-b7e40af003e4" containerID="0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f" exitCode=0 Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.226972 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerDied","Data":"0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f"} Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.227027 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerStarted","Data":"34703deb80cf9acbe12f50abf603df6ee8aa53705c03982a81830e7027561ac4"} Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.256357 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"782703f2-6f49-436a-9ddb-d9a7d3024d83","Type":"ContainerStarted","Data":"bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f"} Dec 03 07:29:56 crc 
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.267428 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" event={"ID":"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf","Type":"ContainerStarted","Data":"a312b03f4510490a7c149bd32fd50830b58acb615595eb3ee89b802a8e42f803"}
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.267472 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" event={"ID":"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf","Type":"ContainerStarted","Data":"a484bfd4590cbbffa2496acdac9504bb172566575cb6b80f87fe15a8afeb068a"}
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.267641 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq"
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.272745 4612 generic.go:334] "Generic (PLEG): container finished" podID="faf2b695-863f-4f58-b53a-66aa4addd381" containerID="6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53" exitCode=0
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.272795 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerDied","Data":"6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53"}
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.294881 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" podStartSLOduration=141.294856815 podStartE2EDuration="2m21.294856815s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:29:56.292246638 +0000 UTC m=+159.465604038" watchObservedRunningTime="2025-12-03 07:29:56.294856815 +0000 UTC m=+159.468214215"
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.443447 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"]
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.920072 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 07:29:56 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld
Dec 03 07:29:56 crc kubenswrapper[4612]: [+]process-running ok
Dec 03 07:29:56 crc kubenswrapper[4612]: healthz check failed
Dec 03 07:29:56 crc kubenswrapper[4612]: I1203 07:29:56.921120 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.306832 4612 generic.go:334] "Generic (PLEG): container finished" podID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerID="4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494" exitCode=0
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.306892 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerDied","Data":"4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494"}
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.306917 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerStarted","Data":"2961be54b1836315e2a18f7c03ff9e5d3ba954f548396396fa7cf7f9b941f36f"}
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.355469 4612 generic.go:334] "Generic (PLEG): container finished" podID="3625877f-a938-482d-90ed-f5fce8ef232d" containerID="b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367" exitCode=0
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.355565 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerDied","Data":"b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367"}
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.364239 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"782703f2-6f49-436a-9ddb-d9a7d3024d83","Type":"ContainerStarted","Data":"19f043d6041de8584797713f99a5410467e03897fd149d29707b915d4dd63f66"}
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.918965 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 07:29:57 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld
Dec 03 07:29:57 crc kubenswrapper[4612]: [+]process-running ok
Dec 03 07:29:57 crc kubenswrapper[4612]: healthz check failed
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.919061 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 07:29:57 crc kubenswrapper[4612]: I1203 07:29:57.946559 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.026233 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fca5f46d-010f-4d46-8926-fd2a2cb9ee1e-metrics-certs\") pod \"network-metrics-daemon-8m4gl\" (UID: \"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e\") " pod="openshift-multus/network-metrics-daemon-8m4gl"
Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.109878 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl"
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8m4gl" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.292845 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8m4gl"] Dec 03 07:29:58 crc kubenswrapper[4612]: W1203 07:29:58.300509 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca5f46d_010f_4d46_8926_fd2a2cb9ee1e.slice/crio-a2ce4e434c2aebcef3439410a378dc721ec0954bf9c156eca95937144286acb5 WatchSource:0}: Error finding container a2ce4e434c2aebcef3439410a378dc721ec0954bf9c156eca95937144286acb5: Status 404 returned error can't find the container with id a2ce4e434c2aebcef3439410a378dc721ec0954bf9c156eca95937144286acb5 Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.372182 4612 generic.go:334] "Generic (PLEG): container finished" podID="782703f2-6f49-436a-9ddb-d9a7d3024d83" containerID="19f043d6041de8584797713f99a5410467e03897fd149d29707b915d4dd63f66" exitCode=0 Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.372261 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"782703f2-6f49-436a-9ddb-d9a7d3024d83","Type":"ContainerDied","Data":"19f043d6041de8584797713f99a5410467e03897fd149d29707b915d4dd63f66"} Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.376455 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" event={"ID":"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e","Type":"ContainerStarted","Data":"a2ce4e434c2aebcef3439410a378dc721ec0954bf9c156eca95937144286acb5"} Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.398431 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.409333 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-9rnzg" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.615084 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.616414 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.622617 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.623029 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.669707 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.760685 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.760751 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.861798 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.861864 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.862053 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.883525 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.919697 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:58 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:58 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:58 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.919751 4612 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:58 crc kubenswrapper[4612]: I1203 07:29:58.943788 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.417335 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" event={"ID":"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e","Type":"ContainerStarted","Data":"c9a3caba395b15e61e534a998e9e326710d0f763ce8c5f5119b4d6cb9551da84"} Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.566198 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.668274 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-c2f7j" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.740482 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.887382 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir\") pod \"782703f2-6f49-436a-9ddb-d9a7d3024d83\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.887493 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access\") pod \"782703f2-6f49-436a-9ddb-d9a7d3024d83\" (UID: \"782703f2-6f49-436a-9ddb-d9a7d3024d83\") " Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.887517 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "782703f2-6f49-436a-9ddb-d9a7d3024d83" (UID: "782703f2-6f49-436a-9ddb-d9a7d3024d83"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.887705 4612 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782703f2-6f49-436a-9ddb-d9a7d3024d83-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.912146 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "782703f2-6f49-436a-9ddb-d9a7d3024d83" (UID: "782703f2-6f49-436a-9ddb-d9a7d3024d83"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.932575 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:29:59 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:29:59 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:29:59 crc kubenswrapper[4612]: healthz check failed Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.932637 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:29:59 crc kubenswrapper[4612]: I1203 07:29:59.988861 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782703f2-6f49-436a-9ddb-d9a7d3024d83-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.138249 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn"] Dec 03 07:30:00 crc kubenswrapper[4612]: E1203 07:30:00.138736 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="782703f2-6f49-436a-9ddb-d9a7d3024d83" containerName="pruner" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.138749 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="782703f2-6f49-436a-9ddb-d9a7d3024d83" containerName="pruner" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.138841 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="782703f2-6f49-436a-9ddb-d9a7d3024d83" containerName="pruner" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.139263 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.142526 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.142810 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.152679 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn"] Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.298279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhslc\" (UniqueName: \"kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.298318 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.298336 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.399405 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhslc\" (UniqueName: \"kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.399442 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.399459 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.401390 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume\") pod 
\"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.419045 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.419441 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhslc\" (UniqueName: \"kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc\") pod \"collect-profiles-29412450-cvvtn\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.440343 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"782703f2-6f49-436a-9ddb-d9a7d3024d83","Type":"ContainerDied","Data":"bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f"} Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.440395 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc6832c8e30992b8fe241022b4be6c91a34be7e3ed3a8411e30e1507b3f38c2f" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.440459 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.452347 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"552446c1-d4b3-47b8-bc8c-010f69c6a8ab","Type":"ContainerStarted","Data":"3973c85fc5db362dba93d7779a003753b9a3684eb8de2d12ddf13dcc1598aa5b"} Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.462499 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.919603 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:30:00 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:30:00 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:30:00 crc kubenswrapper[4612]: healthz check failed Dec 03 07:30:00 crc kubenswrapper[4612]: I1203 07:30:00.919866 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.018732 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn"] Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.464860 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8m4gl" event={"ID":"fca5f46d-010f-4d46-8926-fd2a2cb9ee1e","Type":"ContainerStarted","Data":"18336b808cc43742181da9d6584147ea91c017c1aebcd478c727e439d377865e"} Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.471204 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"552446c1-d4b3-47b8-bc8c-010f69c6a8ab","Type":"ContainerStarted","Data":"8cf0233f0f8eb75736c71a11404446466eca975e4b8eb6a03170be2f41d22f74"} Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.472818 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" event={"ID":"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e","Type":"ContainerStarted","Data":"21c7dab1fa7c27b85732cad37bd813dcb6ae8ae24a2eaeb322f5b0d9a433dc88"} Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.483911 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-8m4gl" podStartSLOduration=146.483889075 podStartE2EDuration="2m26.483889075s" podCreationTimestamp="2025-12-03 07:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:30:01.480214201 +0000 UTC m=+164.653571621" watchObservedRunningTime="2025-12-03 07:30:01.483889075 +0000 UTC m=+164.657246495" Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.920087 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:30:01 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:30:01 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:30:01 crc kubenswrapper[4612]: healthz check failed Dec 03 07:30:01 crc kubenswrapper[4612]: I1203 07:30:01.920150 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:30:02 crc 
kubenswrapper[4612]: I1203 07:30:02.493651 4612 generic.go:334] "Generic (PLEG): container finished" podID="552446c1-d4b3-47b8-bc8c-010f69c6a8ab" containerID="8cf0233f0f8eb75736c71a11404446466eca975e4b8eb6a03170be2f41d22f74" exitCode=0 Dec 03 07:30:02 crc kubenswrapper[4612]: I1203 07:30:02.493725 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"552446c1-d4b3-47b8-bc8c-010f69c6a8ab","Type":"ContainerDied","Data":"8cf0233f0f8eb75736c71a11404446466eca975e4b8eb6a03170be2f41d22f74"} Dec 03 07:30:02 crc kubenswrapper[4612]: I1203 07:30:02.514935 4612 generic.go:334] "Generic (PLEG): container finished" podID="fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" containerID="2c91aa771a3949b0c87220ea3a6edee18105485f517ee72bdbf9102b19114cee" exitCode=0 Dec 03 07:30:02 crc kubenswrapper[4612]: I1203 07:30:02.515061 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" event={"ID":"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e","Type":"ContainerDied","Data":"2c91aa771a3949b0c87220ea3a6edee18105485f517ee72bdbf9102b19114cee"} Dec 03 07:30:02 crc kubenswrapper[4612]: I1203 07:30:02.919323 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:30:02 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:30:02 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:30:02 crc kubenswrapper[4612]: healthz check failed Dec 03 07:30:02 crc kubenswrapper[4612]: I1203 07:30:02.919415 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:30:03 crc kubenswrapper[4612]: I1203 07:30:03.538632 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-k6tc4" Dec 03 07:30:03 crc kubenswrapper[4612]: I1203 07:30:03.575487 4612 patch_prober.go:28] interesting pod/console-f9d7485db-477tn container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 03 07:30:03 crc kubenswrapper[4612]: I1203 07:30:03.575540 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-477tn" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 03 07:30:03 crc kubenswrapper[4612]: I1203 07:30:03.918803 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:30:03 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:30:03 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:30:03 crc kubenswrapper[4612]: healthz check failed Dec 03 07:30:03 crc kubenswrapper[4612]: I1203 07:30:03.918867 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" 
podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:30:04 crc kubenswrapper[4612]: I1203 07:30:04.918787 4612 patch_prober.go:28] interesting pod/router-default-5444994796-mng7v container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 07:30:04 crc kubenswrapper[4612]: [-]has-synced failed: reason withheld Dec 03 07:30:04 crc kubenswrapper[4612]: [+]process-running ok Dec 03 07:30:04 crc kubenswrapper[4612]: healthz check failed Dec 03 07:30:04 crc kubenswrapper[4612]: I1203 07:30:04.918849 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mng7v" podUID="db79bc10-b5e3-4852-973f-b00e50d6314e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:30:05 crc kubenswrapper[4612]: I1203 07:30:05.918952 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:30:05 crc kubenswrapper[4612]: I1203 07:30:05.926140 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-mng7v" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.057472 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.062197 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243231 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access\") pod \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243284 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume\") pod \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243358 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume\") pod \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243417 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhslc\" (UniqueName: \"kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc\") pod \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\" (UID: \"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e\") " Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243451 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir\") pod \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\" (UID: \"552446c1-d4b3-47b8-bc8c-010f69c6a8ab\") " Dec 03 
07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.243639 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "552446c1-d4b3-47b8-bc8c-010f69c6a8ab" (UID: "552446c1-d4b3-47b8-bc8c-010f69c6a8ab"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.244228 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume" (OuterVolumeSpecName: "config-volume") pod "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" (UID: "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.248856 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc" (OuterVolumeSpecName: "kube-api-access-qhslc") pod "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" (UID: "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e"). InnerVolumeSpecName "kube-api-access-qhslc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.250082 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" (UID: "fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.253097 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "552446c1-d4b3-47b8-bc8c-010f69c6a8ab" (UID: "552446c1-d4b3-47b8-bc8c-010f69c6a8ab"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.344936 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhslc\" (UniqueName: \"kubernetes.io/projected/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-kube-api-access-qhslc\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.344990 4612 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.345005 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/552446c1-d4b3-47b8-bc8c-010f69c6a8ab-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.345017 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.345028 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.578482 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.583743 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.654153 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" event={"ID":"fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e","Type":"ContainerDied","Data":"21c7dab1fa7c27b85732cad37bd813dcb6ae8ae24a2eaeb322f5b0d9a433dc88"} Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.654199 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21c7dab1fa7c27b85732cad37bd813dcb6ae8ae24a2eaeb322f5b0d9a433dc88" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.654271 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.675692 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.676771 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"552446c1-d4b3-47b8-bc8c-010f69c6a8ab","Type":"ContainerDied","Data":"3973c85fc5db362dba93d7779a003753b9a3684eb8de2d12ddf13dcc1598aa5b"} Dec 03 07:30:13 crc kubenswrapper[4612]: I1203 07:30:13.676830 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3973c85fc5db362dba93d7779a003753b9a3684eb8de2d12ddf13dcc1598aa5b" Dec 03 07:30:14 crc kubenswrapper[4612]: I1203 07:30:14.760470 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:30:17 crc kubenswrapper[4612]: I1203 07:30:17.137576 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:30:17 crc kubenswrapper[4612]: I1203 07:30:17.138028 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:30:23 crc kubenswrapper[4612]: I1203 07:30:23.440843 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 07:30:24 crc kubenswrapper[4612]: I1203 07:30:24.603690 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mnh7m" Dec 03 07:30:31 crc kubenswrapper[4612]: E1203 07:30:31.017223 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 07:30:31 crc kubenswrapper[4612]: E1203 07:30:31.018082 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
Dec 03 07:30:31 crc kubenswrapper[4612]: E1203 07:30:31.019251 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-p52h2" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.311445 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 03 07:30:32 crc kubenswrapper[4612]: E1203 07:30:32.312379 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" containerName="collect-profiles"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.312398 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" containerName="collect-profiles"
Dec 03 07:30:32 crc kubenswrapper[4612]: E1203 07:30:32.312423 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="552446c1-d4b3-47b8-bc8c-010f69c6a8ab" containerName="pruner"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.312430 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="552446c1-d4b3-47b8-bc8c-010f69c6a8ab" containerName="pruner"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.312557 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="552446c1-d4b3-47b8-bc8c-010f69c6a8ab" containerName="pruner"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.312573 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" containerName="collect-profiles"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.313609 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.316608 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.316709 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.319347 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.493393 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.493451 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.594618 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.594675 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.594778 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.617651 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 07:30:32 crc kubenswrapper[4612]: I1203 07:30:32.634536 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.065090 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-p52h2" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.180531 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.180924 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dqngr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mj9vl_openshift-marketplace(3eaec957-909d-4e80-9bd2-bf765a70a1b1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.182649 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mj9vl" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.231905 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.232131 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wt7tx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nj5zt_openshift-marketplace(cdd62db3-0bae-4037-915e-a339d0ecc7a6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.233538 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nj5zt" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.255121 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.255312 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.257362 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-r669p" podUID="faf2b695-863f-4f58-b53a-66aa4addd381"
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.531451 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.784748 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7f9acfd5-2f7a-4b96-a233-9ef887c45986","Type":"ContainerStarted","Data":"ceaf411cc4afa34a4924f40c0b306db41fd251e207f339bf01739faa2905f00d"}
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.786105 4612 generic.go:334] "Generic (PLEG): container finished" podID="b11d0417-0152-4c68-871a-b7e40af003e4" containerID="51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40" exitCode=0
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.786156 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerDied","Data":"51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40"}
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.790726 4612 generic.go:334] "Generic (PLEG): container finished" podID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerID="00a10c817cc02b04bb376e554cc473168ef8ec4cb1cf29a7075afb58bfb3ee8f" exitCode=0
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.790805 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerDied","Data":"00a10c817cc02b04bb376e554cc473168ef8ec4cb1cf29a7075afb58bfb3ee8f"}
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.797262 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerStarted","Data":"d2ae56b720c170f690e49608786b48e04602c80dca4cd8b3096c4c9adccd5cbf"}
Dec 03 07:30:33 crc kubenswrapper[4612]: I1203 07:30:33.799495 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerStarted","Data":"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b"}
Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.801056 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mj9vl" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1"
Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.801362 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nj5zt" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6"
Dec 03 07:30:33 crc kubenswrapper[4612]: E1203 07:30:33.808992 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-r669p" podUID="faf2b695-863f-4f58-b53a-66aa4addd381"
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.806682 4612 generic.go:334] "Generic (PLEG): container finished" podID="3625877f-a938-482d-90ed-f5fce8ef232d" containerID="c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b" exitCode=0
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.807066 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerDied","Data":"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b"}
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.809722 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7f9acfd5-2f7a-4b96-a233-9ef887c45986","Type":"ContainerStarted","Data":"7603eeb92545b5f118778faf203201cfa3dd0c848b2701db6b2ad49683e75e2c"}
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.813565 4612 generic.go:334] "Generic (PLEG): container finished" podID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerID="d2ae56b720c170f690e49608786b48e04602c80dca4cd8b3096c4c9adccd5cbf" exitCode=0
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.813592 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerDied","Data":"d2ae56b720c170f690e49608786b48e04602c80dca4cd8b3096c4c9adccd5cbf"}
Dec 03 07:30:34 crc kubenswrapper[4612]: I1203 07:30:34.852781 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.852764563 podStartE2EDuration="2.852764563s" podCreationTimestamp="2025-12-03 07:30:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:30:34.83715875 +0000 UTC m=+198.010516150" watchObservedRunningTime="2025-12-03 07:30:34.852764563 +0000 UTC m=+198.026121963"
Dec 03 07:30:35 crc kubenswrapper[4612]: I1203 07:30:35.820646 4612 generic.go:334] "Generic (PLEG): container finished" podID="7f9acfd5-2f7a-4b96-a233-9ef887c45986" containerID="7603eeb92545b5f118778faf203201cfa3dd0c848b2701db6b2ad49683e75e2c" exitCode=0
Dec 03 07:30:35 crc kubenswrapper[4612]: I1203 07:30:35.820697 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7f9acfd5-2f7a-4b96-a233-9ef887c45986","Type":"ContainerDied","Data":"7603eeb92545b5f118778faf203201cfa3dd0c848b2701db6b2ad49683e75e2c"}
Dec 03 07:30:36 crc kubenswrapper[4612]: I1203 07:30:36.901356 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 03 07:30:36 crc kubenswrapper[4612]: I1203 07:30:36.907642 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 07:30:36 crc kubenswrapper[4612]: I1203 07:30:36.912092 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.054113 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.054199 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.054216 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.102874 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.155964 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.156032 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.156070 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.156089 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.156152 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.179592 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access\") pod \"installer-9-crc\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.223011 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.259204 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access\") pod \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.259505 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir\") pod \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\" (UID: \"7f9acfd5-2f7a-4b96-a233-9ef887c45986\") " Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.259576 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7f9acfd5-2f7a-4b96-a233-9ef887c45986" (UID: "7f9acfd5-2f7a-4b96-a233-9ef887c45986"). 
InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.262104 4612 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.271100 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7f9acfd5-2f7a-4b96-a233-9ef887c45986" (UID: "7f9acfd5-2f7a-4b96-a233-9ef887c45986"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.362863 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f9acfd5-2f7a-4b96-a233-9ef887c45986-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.722129 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 07:30:37 crc kubenswrapper[4612]: W1203 07:30:37.727857 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod010793a3_88fd_4772_bde7_ef4a17d40ba3.slice/crio-f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b WatchSource:0}: Error finding container f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b: Status 404 returned error can't find the container with id f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.849701 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerStarted","Data":"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.853521 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7f9acfd5-2f7a-4b96-a233-9ef887c45986","Type":"ContainerDied","Data":"ceaf411cc4afa34a4924f40c0b306db41fd251e207f339bf01739faa2905f00d"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.853547 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceaf411cc4afa34a4924f40c0b306db41fd251e207f339bf01739faa2905f00d" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.853577 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.856088 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerStarted","Data":"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.857906 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerStarted","Data":"a25f06ed75e8eb011b8967204550823a93b080f6370015fa9ae4ef56a051df4d"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.858859 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"010793a3-88fd-4772-bde7-ef4a17d40ba3","Type":"ContainerStarted","Data":"f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.866475 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerStarted","Data":"a729c8fccede3517017f78cdf57263cd8c7a9157ba1af26b4040df01ccac15cc"} Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.867518 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k6trx" podStartSLOduration=2.919151367 podStartE2EDuration="42.867506352s" podCreationTimestamp="2025-12-03 07:29:55 +0000 UTC" firstStartedPulling="2025-12-03 07:29:57.364888447 +0000 UTC m=+160.538245837" lastFinishedPulling="2025-12-03 07:30:37.313243422 +0000 UTC m=+200.486600822" observedRunningTime="2025-12-03 07:30:37.865172373 +0000 UTC m=+201.038529773" watchObservedRunningTime="2025-12-03 07:30:37.867506352 +0000 UTC m=+201.040863762" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.889211 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9r8gz" podStartSLOduration=3.276557985 podStartE2EDuration="45.889192755s" podCreationTimestamp="2025-12-03 07:29:52 +0000 UTC" firstStartedPulling="2025-12-03 07:29:54.063234633 +0000 UTC m=+157.236592033" lastFinishedPulling="2025-12-03 07:30:36.675869383 +0000 UTC m=+199.849226803" observedRunningTime="2025-12-03 07:30:37.886103507 +0000 UTC m=+201.059460927" watchObservedRunningTime="2025-12-03 07:30:37.889192755 +0000 UTC m=+201.062550165" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.908115 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hvvvp" podStartSLOduration=2.858096527 podStartE2EDuration="43.908086177s" podCreationTimestamp="2025-12-03 07:29:54 +0000 UTC" firstStartedPulling="2025-12-03 07:29:56.230229 +0000 UTC m=+159.403586400" lastFinishedPulling="2025-12-03 07:30:37.28021865 +0000 UTC m=+200.453576050" observedRunningTime="2025-12-03 07:30:37.906762604 +0000 UTC m=+201.080120004" watchObservedRunningTime="2025-12-03 07:30:37.908086177 +0000 UTC m=+201.081443567" Dec 03 07:30:37 crc kubenswrapper[4612]: I1203 07:30:37.929279 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-88rgv" podStartSLOduration=2.478062728 podStartE2EDuration="45.929261748s" podCreationTimestamp="2025-12-03 07:29:52 
+0000 UTC" firstStartedPulling="2025-12-03 07:29:54.067636717 +0000 UTC m=+157.240994117" lastFinishedPulling="2025-12-03 07:30:37.518835737 +0000 UTC m=+200.692193137" observedRunningTime="2025-12-03 07:30:37.926067046 +0000 UTC m=+201.099424456" watchObservedRunningTime="2025-12-03 07:30:37.929261748 +0000 UTC m=+201.102619148" Dec 03 07:30:38 crc kubenswrapper[4612]: I1203 07:30:38.873925 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"010793a3-88fd-4772-bde7-ef4a17d40ba3","Type":"ContainerStarted","Data":"f76d67dba8ce8d2d26e049778f1cd7ffa51940892b09cf7f60c7d144654908e1"} Dec 03 07:30:38 crc kubenswrapper[4612]: I1203 07:30:38.896396 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.89637445 podStartE2EDuration="2.89637445s" podCreationTimestamp="2025-12-03 07:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:30:38.894092042 +0000 UTC m=+202.067449442" watchObservedRunningTime="2025-12-03 07:30:38.89637445 +0000 UTC m=+202.069731870" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.417645 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.420022 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.818460 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.818750 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.959954 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.960576 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:30:42 crc kubenswrapper[4612]: I1203 07:30:42.999760 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:43 crc kubenswrapper[4612]: I1203 07:30:43.012685 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.018487 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.837953 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.837996 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.879344 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.900906 4612 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9r8gz" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="registry-server" containerID="cri-o://a25f06ed75e8eb011b8967204550823a93b080f6370015fa9ae4ef56a051df4d" gracePeriod=2 Dec 03 07:30:44 crc kubenswrapper[4612]: I1203 07:30:44.940664 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:45 crc kubenswrapper[4612]: I1203 07:30:45.447099 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:30:45 crc kubenswrapper[4612]: I1203 07:30:45.447679 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:30:45 crc kubenswrapper[4612]: I1203 07:30:45.488181 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:30:45 crc kubenswrapper[4612]: I1203 07:30:45.942739 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:30:46 crc kubenswrapper[4612]: I1203 07:30:46.912241 4612 generic.go:334] "Generic (PLEG): container finished" podID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerID="a25f06ed75e8eb011b8967204550823a93b080f6370015fa9ae4ef56a051df4d" exitCode=0 Dec 03 07:30:46 crc kubenswrapper[4612]: I1203 07:30:46.912431 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerDied","Data":"a25f06ed75e8eb011b8967204550823a93b080f6370015fa9ae4ef56a051df4d"} Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.137460 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.137525 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.137588 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.138251 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.138355 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" 
containerID="cri-o://ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3" gracePeriod=600 Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.218289 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.218521 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hvvvp" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="registry-server" containerID="cri-o://3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682" gracePeriod=2 Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.571013 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.589754 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbrqd\" (UniqueName: \"kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd\") pod \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.590766 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities\") pod \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.591668 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities" (OuterVolumeSpecName: "utilities") pod "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" (UID: "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.591868 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content\") pod \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\" (UID: \"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7\") " Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.605805 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.611973 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd" (OuterVolumeSpecName: "kube-api-access-dbrqd") pod "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" (UID: "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7"). InnerVolumeSpecName "kube-api-access-dbrqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.649563 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" (UID: "5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.706927 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbrqd\" (UniqueName: \"kubernetes.io/projected/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-kube-api-access-dbrqd\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.706971 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.919746 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9r8gz" event={"ID":"5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7","Type":"ContainerDied","Data":"09ed3a57ddd84c7645a832dfe5c2334418af769eb30424ef1e25abf383d72b80"} Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.919783 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9r8gz" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.919806 4612 scope.go:117] "RemoveContainer" containerID="a25f06ed75e8eb011b8967204550823a93b080f6370015fa9ae4ef56a051df4d" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.925454 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3" exitCode=0 Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.926032 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3"} Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.935236 4612 scope.go:117] "RemoveContainer" containerID="00a10c817cc02b04bb376e554cc473168ef8ec4cb1cf29a7075afb58bfb3ee8f" Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.946056 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:30:47 crc kubenswrapper[4612]: I1203 07:30:47.949760 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9r8gz"] Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.012470 4612 scope.go:117] "RemoveContainer" containerID="e2d14af8c81babe99ebfa03ea823b8636787a52e743d820db87decc4a60ea31c" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.881244 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.932810 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vr4c\" (UniqueName: \"kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c\") pod \"b11d0417-0152-4c68-871a-b7e40af003e4\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.932854 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content\") pod \"b11d0417-0152-4c68-871a-b7e40af003e4\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.932966 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities\") pod \"b11d0417-0152-4c68-871a-b7e40af003e4\" (UID: \"b11d0417-0152-4c68-871a-b7e40af003e4\") " Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.933597 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerStarted","Data":"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.935908 4612 generic.go:334] "Generic (PLEG): container finished" podID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerID="cc34f41af25e403e47f8f2ad8c21f3f47a474be7ccdb9e397fcf472a4261ddc4" exitCode=0 Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.936012 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerDied","Data":"cc34f41af25e403e47f8f2ad8c21f3f47a474be7ccdb9e397fcf472a4261ddc4"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.938506 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c" (OuterVolumeSpecName: "kube-api-access-9vr4c") pod "b11d0417-0152-4c68-871a-b7e40af003e4" (UID: "b11d0417-0152-4c68-871a-b7e40af003e4"). InnerVolumeSpecName "kube-api-access-9vr4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.939816 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities" (OuterVolumeSpecName: "utilities") pod "b11d0417-0152-4c68-871a-b7e40af003e4" (UID: "b11d0417-0152-4c68-871a-b7e40af003e4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.940514 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.943344 4612 generic.go:334] "Generic (PLEG): container finished" podID="b11d0417-0152-4c68-871a-b7e40af003e4" containerID="3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682" exitCode=0 Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.943398 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerDied","Data":"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.943424 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hvvvp" event={"ID":"b11d0417-0152-4c68-871a-b7e40af003e4","Type":"ContainerDied","Data":"34703deb80cf9acbe12f50abf603df6ee8aa53705c03982a81830e7027561ac4"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.943444 4612 scope.go:117] "RemoveContainer" containerID="3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.943565 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hvvvp" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.960731 4612 generic.go:334] "Generic (PLEG): container finished" podID="faf2b695-863f-4f58-b53a-66aa4addd381" containerID="8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af" exitCode=0 Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.960833 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerDied","Data":"8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.980786 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerStarted","Data":"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a"} Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.980865 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11d0417-0152-4c68-871a-b7e40af003e4" (UID: "b11d0417-0152-4c68-871a-b7e40af003e4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:30:48 crc kubenswrapper[4612]: I1203 07:30:48.983401 4612 scope.go:117] "RemoveContainer" containerID="51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.009590 4612 scope.go:117] "RemoveContainer" containerID="0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.024056 4612 scope.go:117] "RemoveContainer" containerID="3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682" Dec 03 07:30:49 crc kubenswrapper[4612]: E1203 07:30:49.024600 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682\": container with ID starting with 3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682 not found: ID does not exist" containerID="3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.024638 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682"} err="failed to get container status \"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682\": rpc error: code = NotFound desc = could not find container \"3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682\": container with ID starting with 3d483eac8d56a6c1ad8f4183fd9d35b900a2ea0e8e2e3682b24b9725ae2cf682 not found: ID does not exist" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.024665 4612 scope.go:117] "RemoveContainer" containerID="51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40" Dec 03 07:30:49 crc kubenswrapper[4612]: E1203 07:30:49.025111 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40\": container with ID starting with 51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40 not found: ID does not exist" containerID="51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.025148 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40"} err="failed to get container status \"51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40\": rpc error: code = NotFound desc = could not find container \"51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40\": container with ID starting with 51118c0820f70b4f5cfd36fd5c9b21dcee7d79b43d0fe0e8b997eaf3ee3d0a40 not found: ID does not exist" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.025167 4612 scope.go:117] "RemoveContainer" containerID="0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f" Dec 03 07:30:49 crc kubenswrapper[4612]: E1203 07:30:49.025392 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f\": container with ID starting with 0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f not found: ID does not exist" containerID="0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f" Dec 03 07:30:49 crc 
kubenswrapper[4612]: I1203 07:30:49.025432 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f"} err="failed to get container status \"0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f\": rpc error: code = NotFound desc = could not find container \"0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f\": container with ID starting with 0e1b419e357b26d3cdebc01edf7486ab943effb0c9f8e7cfe4d0b9cfcf4f444f not found: ID does not exist" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.033957 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vr4c\" (UniqueName: \"kubernetes.io/projected/b11d0417-0152-4c68-871a-b7e40af003e4-kube-api-access-9vr4c\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.033982 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.033991 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11d0417-0152-4c68-871a-b7e40af003e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.097343 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" path="/var/lib/kubelet/pods/5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7/volumes" Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.277331 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.286604 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hvvvp"] Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.990281 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerStarted","Data":"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d"} Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.993572 4612 generic.go:334] "Generic (PLEG): container finished" podID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerID="d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a" exitCode=0 Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.993656 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerDied","Data":"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a"} Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.996356 4612 generic.go:334] "Generic (PLEG): container finished" podID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerID="5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94" exitCode=0 Dec 03 07:30:49 crc kubenswrapper[4612]: I1203 07:30:49.996429 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerDied","Data":"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94"} Dec 03 07:30:50 crc kubenswrapper[4612]: I1203 07:30:50.001022 4612 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerStarted","Data":"a8a717c788fa8cfcb589a3d6bb68cf35739f6ed6f1ce3772f1e1b0df56a56e33"} Dec 03 07:30:50 crc kubenswrapper[4612]: I1203 07:30:50.014143 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r669p" podStartSLOduration=2.87379628 podStartE2EDuration="56.01412548s" podCreationTimestamp="2025-12-03 07:29:54 +0000 UTC" firstStartedPulling="2025-12-03 07:29:56.276487552 +0000 UTC m=+159.449844952" lastFinishedPulling="2025-12-03 07:30:49.416816752 +0000 UTC m=+212.590174152" observedRunningTime="2025-12-03 07:30:50.010047716 +0000 UTC m=+213.183405126" watchObservedRunningTime="2025-12-03 07:30:50.01412548 +0000 UTC m=+213.187482880" Dec 03 07:30:50 crc kubenswrapper[4612]: I1203 07:30:50.069192 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nj5zt" podStartSLOduration=3.704228568 podStartE2EDuration="59.069171654s" podCreationTimestamp="2025-12-03 07:29:51 +0000 UTC" firstStartedPulling="2025-12-03 07:29:54.070574063 +0000 UTC m=+157.243931453" lastFinishedPulling="2025-12-03 07:30:49.435517139 +0000 UTC m=+212.608874539" observedRunningTime="2025-12-03 07:30:50.065528021 +0000 UTC m=+213.238885431" watchObservedRunningTime="2025-12-03 07:30:50.069171654 +0000 UTC m=+213.242529054" Dec 03 07:30:51 crc kubenswrapper[4612]: I1203 07:30:51.006832 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerStarted","Data":"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939"} Dec 03 07:30:51 crc kubenswrapper[4612]: I1203 07:30:51.100609 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" path="/var/lib/kubelet/pods/b11d0417-0152-4c68-871a-b7e40af003e4/volumes" Dec 03 07:30:51 crc kubenswrapper[4612]: I1203 07:30:51.123840 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mj9vl" podStartSLOduration=2.790474347 podStartE2EDuration="59.12382121s" podCreationTimestamp="2025-12-03 07:29:52 +0000 UTC" firstStartedPulling="2025-12-03 07:29:54.064072595 +0000 UTC m=+157.237429995" lastFinishedPulling="2025-12-03 07:30:50.397419458 +0000 UTC m=+213.570776858" observedRunningTime="2025-12-03 07:30:51.122073516 +0000 UTC m=+214.295430916" watchObservedRunningTime="2025-12-03 07:30:51.12382121 +0000 UTC m=+214.297178610" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.014602 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerStarted","Data":"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46"} Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.035030 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p52h2" podStartSLOduration=3.098854766 podStartE2EDuration="57.035012435s" podCreationTimestamp="2025-12-03 07:29:55 +0000 UTC" firstStartedPulling="2025-12-03 07:29:57.308300448 +0000 UTC m=+160.481657848" lastFinishedPulling="2025-12-03 07:30:51.244458117 +0000 UTC m=+214.417815517" observedRunningTime="2025-12-03 07:30:52.031515996 +0000 UTC m=+215.204873406" 
watchObservedRunningTime="2025-12-03 07:30:52.035012435 +0000 UTC m=+215.208369845" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.211831 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.211882 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.277471 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.633607 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.633902 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:30:52 crc kubenswrapper[4612]: I1203 07:30:52.679263 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:30:54 crc kubenswrapper[4612]: I1203 07:30:54.418854 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:30:54 crc kubenswrapper[4612]: I1203 07:30:54.422105 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:30:54 crc kubenswrapper[4612]: I1203 07:30:54.469833 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:30:55 crc kubenswrapper[4612]: I1203 07:30:55.073704 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:30:55 crc kubenswrapper[4612]: I1203 07:30:55.910343 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:30:55 crc kubenswrapper[4612]: I1203 07:30:55.910586 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:30:56 crc kubenswrapper[4612]: I1203 07:30:56.947151 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p52h2" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="registry-server" probeResult="failure" output=< Dec 03 07:30:56 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 07:30:56 crc kubenswrapper[4612]: > Dec 03 07:31:02 crc kubenswrapper[4612]: I1203 07:31:02.257738 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:31:02 crc kubenswrapper[4612]: I1203 07:31:02.677247 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:31:02 crc kubenswrapper[4612]: I1203 07:31:02.725225 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.077020 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mj9vl" 
podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="registry-server" containerID="cri-o://dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939" gracePeriod=2 Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.439502 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.535235 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities\") pod \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.535407 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqngr\" (UniqueName: \"kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr\") pod \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.535445 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content\") pod \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\" (UID: \"3eaec957-909d-4e80-9bd2-bf765a70a1b1\") " Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.536189 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities" (OuterVolumeSpecName: "utilities") pod "3eaec957-909d-4e80-9bd2-bf765a70a1b1" (UID: "3eaec957-909d-4e80-9bd2-bf765a70a1b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.540512 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr" (OuterVolumeSpecName: "kube-api-access-dqngr") pod "3eaec957-909d-4e80-9bd2-bf765a70a1b1" (UID: "3eaec957-909d-4e80-9bd2-bf765a70a1b1"). InnerVolumeSpecName "kube-api-access-dqngr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.603653 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3eaec957-909d-4e80-9bd2-bf765a70a1b1" (UID: "3eaec957-909d-4e80-9bd2-bf765a70a1b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.637173 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqngr\" (UniqueName: \"kubernetes.io/projected/3eaec957-909d-4e80-9bd2-bf765a70a1b1-kube-api-access-dqngr\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.637211 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:03 crc kubenswrapper[4612]: I1203 07:31:03.637279 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaec957-909d-4e80-9bd2-bf765a70a1b1-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.084708 4612 generic.go:334] "Generic (PLEG): container finished" podID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerID="dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939" exitCode=0 Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.084860 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerDied","Data":"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939"} Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.085049 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9vl" event={"ID":"3eaec957-909d-4e80-9bd2-bf765a70a1b1","Type":"ContainerDied","Data":"6bacb085b613bac8de87ea5bf575c62fc5495d0f1c2c40e32b57515054646723"} Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.085078 4612 scope.go:117] "RemoveContainer" containerID="dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.084924 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mj9vl" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.116404 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.123783 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mj9vl"] Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.124619 4612 scope.go:117] "RemoveContainer" containerID="5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.144707 4612 scope.go:117] "RemoveContainer" containerID="727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.159234 4612 scope.go:117] "RemoveContainer" containerID="dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939" Dec 03 07:31:04 crc kubenswrapper[4612]: E1203 07:31:04.159580 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939\": container with ID starting with dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939 not found: ID does not exist" containerID="dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.159612 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939"} err="failed to get container status \"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939\": rpc error: code = NotFound desc = could not find container \"dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939\": container with ID starting with dae8cf98ed6bf57d1d1ace6188ba8867a9fb2f83ffaa6ff40f9cebd5b6f61939 not found: ID does not exist" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.159631 4612 scope.go:117] "RemoveContainer" containerID="5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94" Dec 03 07:31:04 crc kubenswrapper[4612]: E1203 07:31:04.159880 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94\": container with ID starting with 5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94 not found: ID does not exist" containerID="5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.159906 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94"} err="failed to get container status \"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94\": rpc error: code = NotFound desc = could not find container \"5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94\": container with ID starting with 5fce832a1aff630dfe5123cae700e362af046d73a6d9190576b52ec5005dab94 not found: ID does not exist" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.159923 4612 scope.go:117] "RemoveContainer" containerID="727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1" Dec 03 07:31:04 crc kubenswrapper[4612]: E1203 07:31:04.160388 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1\": container with ID starting with 727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1 not found: ID does not exist" containerID="727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.160406 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1"} err="failed to get container status \"727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1\": rpc error: code = NotFound desc = could not find container \"727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1\": container with ID starting with 727761e95c64857c9473f8c44aacd4ede8e35effdf5e5cb7c3e6b54600d462e1 not found: ID does not exist" Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.991728 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:31:04 crc kubenswrapper[4612]: I1203 07:31:04.992052 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nj5zt" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="registry-server" containerID="cri-o://a8a717c788fa8cfcb589a3d6bb68cf35739f6ed6f1ce3772f1e1b0df56a56e33" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.002102 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.002335 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-88rgv" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="registry-server" containerID="cri-o://a729c8fccede3517017f78cdf57263cd8c7a9157ba1af26b4040df01ccac15cc" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.007776 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.007991 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" containerID="cri-o://eb3034456614a776c289093a9ad443a76c270c47c063bf5c547161b614a498c2" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.014424 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.014641 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r669p" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="registry-server" containerID="cri-o://d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.028248 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.028476 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k6trx" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" 
containerName="registry-server" containerID="cri-o://3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.037314 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.037532 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p52h2" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="registry-server" containerID="cri-o://4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46" gracePeriod=30 Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051237 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x2kxn"] Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051517 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051537 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051553 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051562 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051571 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051578 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051589 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051602 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051612 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051620 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051629 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051636 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051649 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051657 4612 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="extract-utilities" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051667 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051674 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="extract-content" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051685 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f9acfd5-2f7a-4b96-a233-9ef887c45986" containerName="pruner" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051694 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f9acfd5-2f7a-4b96-a233-9ef887c45986" containerName="pruner" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.051713 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051720 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051832 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e4b6011-fca0-4a0e-b6ae-dedc13aeb8b7" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051852 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b11d0417-0152-4c68-871a-b7e40af003e4" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051865 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f9acfd5-2f7a-4b96-a233-9ef887c45986" containerName="pruner" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.051876 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.052365 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.062912 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x2kxn"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.096441 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eaec957-909d-4e80-9bd2-bf765a70a1b1" path="/var/lib/kubelet/pods/3eaec957-909d-4e80-9bd2-bf765a70a1b1/volumes" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.156813 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.156870 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.156933 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzxd2\" (UniqueName: \"kubernetes.io/projected/4db345c6-2ee2-4acf-9be4-a705bddb07fe-kube-api-access-xzxd2\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.258707 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.258762 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.258814 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzxd2\" (UniqueName: \"kubernetes.io/projected/4db345c6-2ee2-4acf-9be4-a705bddb07fe-kube-api-access-xzxd2\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.260668 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.279588 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4db345c6-2ee2-4acf-9be4-a705bddb07fe-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.282302 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzxd2\" (UniqueName: \"kubernetes.io/projected/4db345c6-2ee2-4acf-9be4-a705bddb07fe-kube-api-access-xzxd2\") pod \"marketplace-operator-79b997595-x2kxn\" (UID: \"4db345c6-2ee2-4acf-9be4-a705bddb07fe\") " pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.441897 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c is running failed: container process not found" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.442547 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c is running failed: container process not found" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.442844 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c is running failed: container process not found" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:31:05 crc kubenswrapper[4612]: E1203 07:31:05.442889 4612 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-k6trx" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="registry-server" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.508296 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.524765 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.538312 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.566806 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content\") pod \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.566887 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities\") pod \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.566954 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content\") pod \"3625877f-a938-482d-90ed-f5fce8ef232d\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.566985 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2txk\" (UniqueName: \"kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk\") pod \"3625877f-a938-482d-90ed-f5fce8ef232d\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.567043 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2ngh\" (UniqueName: \"kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh\") pod \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\" (UID: \"0f3dde25-37e5-4b10-95f4-18e573b0e5a4\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.567130 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities\") pod \"3625877f-a938-482d-90ed-f5fce8ef232d\" (UID: \"3625877f-a938-482d-90ed-f5fce8ef232d\") " Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.571606 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities" (OuterVolumeSpecName: "utilities") pod "3625877f-a938-482d-90ed-f5fce8ef232d" (UID: "3625877f-a938-482d-90ed-f5fce8ef232d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.576882 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities" (OuterVolumeSpecName: "utilities") pod "0f3dde25-37e5-4b10-95f4-18e573b0e5a4" (UID: "0f3dde25-37e5-4b10-95f4-18e573b0e5a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.612164 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk" (OuterVolumeSpecName: "kube-api-access-n2txk") pod "3625877f-a938-482d-90ed-f5fce8ef232d" (UID: "3625877f-a938-482d-90ed-f5fce8ef232d"). InnerVolumeSpecName "kube-api-access-n2txk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.628096 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh" (OuterVolumeSpecName: "kube-api-access-q2ngh") pod "0f3dde25-37e5-4b10-95f4-18e573b0e5a4" (UID: "0f3dde25-37e5-4b10-95f4-18e573b0e5a4"). InnerVolumeSpecName "kube-api-access-q2ngh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.669788 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2txk\" (UniqueName: \"kubernetes.io/projected/3625877f-a938-482d-90ed-f5fce8ef232d-kube-api-access-n2txk\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.669815 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2ngh\" (UniqueName: \"kubernetes.io/projected/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-kube-api-access-q2ngh\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.669827 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.669836 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.818873 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tznzj"] Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.903350 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3625877f-a938-482d-90ed-f5fce8ef232d" (UID: "3625877f-a938-482d-90ed-f5fce8ef232d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.962167 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f3dde25-37e5-4b10-95f4-18e573b0e5a4" (UID: "0f3dde25-37e5-4b10-95f4-18e573b0e5a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.985908 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3dde25-37e5-4b10-95f4-18e573b0e5a4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.985995 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3625877f-a938-482d-90ed-f5fce8ef232d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:05 crc kubenswrapper[4612]: I1203 07:31:05.998288 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.087118 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content\") pod \"faf2b695-863f-4f58-b53a-66aa4addd381\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.087490 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nmrk\" (UniqueName: \"kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk\") pod \"faf2b695-863f-4f58-b53a-66aa4addd381\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.087612 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities\") pod \"faf2b695-863f-4f58-b53a-66aa4addd381\" (UID: \"faf2b695-863f-4f58-b53a-66aa4addd381\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.090345 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities" (OuterVolumeSpecName: "utilities") pod "faf2b695-863f-4f58-b53a-66aa4addd381" (UID: "faf2b695-863f-4f58-b53a-66aa4addd381"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.092537 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk" (OuterVolumeSpecName: "kube-api-access-6nmrk") pod "faf2b695-863f-4f58-b53a-66aa4addd381" (UID: "faf2b695-863f-4f58-b53a-66aa4addd381"). InnerVolumeSpecName "kube-api-access-6nmrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.125308 4612 generic.go:334] "Generic (PLEG): container finished" podID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerID="4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.125384 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerDied","Data":"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.125415 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p52h2" event={"ID":"0f3dde25-37e5-4b10-95f4-18e573b0e5a4","Type":"ContainerDied","Data":"2961be54b1836315e2a18f7c03ff9e5d3ba954f548396396fa7cf7f9b941f36f"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.125433 4612 scope.go:117] "RemoveContainer" containerID="4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.125551 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p52h2" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.140758 4612 generic.go:334] "Generic (PLEG): container finished" podID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerID="a729c8fccede3517017f78cdf57263cd8c7a9157ba1af26b4040df01ccac15cc" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.141137 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerDied","Data":"a729c8fccede3517017f78cdf57263cd8c7a9157ba1af26b4040df01ccac15cc"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.159022 4612 generic.go:334] "Generic (PLEG): container finished" podID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerID="a8a717c788fa8cfcb589a3d6bb68cf35739f6ed6f1ce3772f1e1b0df56a56e33" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.159837 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerDied","Data":"a8a717c788fa8cfcb589a3d6bb68cf35739f6ed6f1ce3772f1e1b0df56a56e33"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.167839 4612 scope.go:117] "RemoveContainer" containerID="d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.169649 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "faf2b695-863f-4f58-b53a-66aa4addd381" (UID: "faf2b695-863f-4f58-b53a-66aa4addd381"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.171152 4612 generic.go:334] "Generic (PLEG): container finished" podID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerID="eb3034456614a776c289093a9ad443a76c270c47c063bf5c547161b614a498c2" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.171664 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" event={"ID":"0a9d1713-389c-4010-b725-3e51fbd8750d","Type":"ContainerDied","Data":"eb3034456614a776c289093a9ad443a76c270c47c063bf5c547161b614a498c2"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.194069 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.194106 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf2b695-863f-4f58-b53a-66aa4addd381-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.194120 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nmrk\" (UniqueName: \"kubernetes.io/projected/faf2b695-863f-4f58-b53a-66aa4addd381-kube-api-access-6nmrk\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.208508 4612 generic.go:334] "Generic (PLEG): container finished" podID="3625877f-a938-482d-90ed-f5fce8ef232d" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.208572 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerDied","Data":"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.208598 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6trx" event={"ID":"3625877f-a938-482d-90ed-f5fce8ef232d","Type":"ContainerDied","Data":"0e38f5cb495b8360f9cacae25c11c6013787d6c475aae0e70762aff181f61552"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.208660 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k6trx" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.223776 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"] Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.240391 4612 scope.go:117] "RemoveContainer" containerID="4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.254703 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p52h2"] Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.255551 4612 generic.go:334] "Generic (PLEG): container finished" podID="faf2b695-863f-4f58-b53a-66aa4addd381" containerID="d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d" exitCode=0 Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.255580 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerDied","Data":"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.255601 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r669p" event={"ID":"faf2b695-863f-4f58-b53a-66aa4addd381","Type":"ContainerDied","Data":"1ce9bc3d7a4ebd233768e2d4f4f8845ef3827b4bfeff54c99af9910cbfa242a7"} Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.255664 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r669p" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.265761 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.268488 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.295222 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt7tx\" (UniqueName: \"kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx\") pod \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.295298 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities\") pod \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.295327 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content\") pod \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\" (UID: \"cdd62db3-0bae-4037-915e-a339d0ecc7a6\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.300490 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities" (OuterVolumeSpecName: "utilities") pod "cdd62db3-0bae-4037-915e-a339d0ecc7a6" (UID: "cdd62db3-0bae-4037-915e-a339d0ecc7a6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.309485 4612 scope.go:117] "RemoveContainer" containerID="4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.310992 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx" (OuterVolumeSpecName: "kube-api-access-wt7tx") pod "cdd62db3-0bae-4037-915e-a339d0ecc7a6" (UID: "cdd62db3-0bae-4037-915e-a339d0ecc7a6"). InnerVolumeSpecName "kube-api-access-wt7tx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.312971 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k6trx"] Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.318890 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46\": container with ID starting with 4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46 not found: ID does not exist" containerID="4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.318990 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46"} err="failed to get container status \"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46\": rpc error: code = NotFound desc = could not find container \"4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46\": container with ID starting with 4b1b0b203fa65414efaaf9893100d5c86d7ef3f607f0d84e7dee49c6e01b9a46 not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.319023 4612 scope.go:117] "RemoveContainer" containerID="d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.322506 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a\": container with ID starting with d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a not found: ID does not exist" containerID="d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.322552 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a"} err="failed to get container status \"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a\": rpc error: code = NotFound desc = could not find container \"d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a\": container with ID starting with d3ae3fa506db00d6613291f500fbff0b9fc780fef1c1d43d2e82365fc9cff51a not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.322584 4612 scope.go:117] "RemoveContainer" containerID="4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.331811 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494\": container with ID starting with 4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494 not found: ID does not exist" containerID="4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.331867 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494"} err="failed to get container status \"4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494\": rpc error: code = NotFound desc = could not find container \"4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494\": container with ID starting with 4d7f0a7c90d486008d4e732ad78feadf95385c924f9a0f51ecd866e981ecd494 not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.331899 4612 scope.go:117] "RemoveContainer" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.366652 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.372255 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r669p"] Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.385018 4612 scope.go:117] "RemoveContainer" containerID="c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.398367 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt7tx\" (UniqueName: \"kubernetes.io/projected/cdd62db3-0bae-4037-915e-a339d0ecc7a6-kube-api-access-wt7tx\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.398395 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.461787 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cdd62db3-0bae-4037-915e-a339d0ecc7a6" (UID: "cdd62db3-0bae-4037-915e-a339d0ecc7a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.468276 4612 scope.go:117] "RemoveContainer" containerID="b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.499273 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdd62db3-0bae-4037-915e-a339d0ecc7a6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.529831 4612 scope.go:117] "RemoveContainer" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.533086 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c\": container with ID starting with 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c not found: ID does not exist" containerID="3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533113 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c"} err="failed to get container status \"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c\": rpc error: code = NotFound desc = could not find container \"3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c\": container with ID starting with 3da2201cc3ae65468684f6505d9ce10497262b630cd7f0cc5dbefe99169bf16c not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533133 4612 scope.go:117] "RemoveContainer" containerID="c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.533427 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b\": container with ID starting with c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b not found: ID does not exist" containerID="c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533445 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b"} err="failed to get container status \"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b\": rpc error: code = NotFound desc = could not find container \"c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b\": container with ID starting with c8d623b780aaae936a54f0aad49bf28b4bf33deb8b39284b044a32dccf11552b not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533459 4612 scope.go:117] "RemoveContainer" containerID="b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.533925 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367\": container with ID starting with b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367 not found: ID does not exist" 
containerID="b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533968 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367"} err="failed to get container status \"b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367\": rpc error: code = NotFound desc = could not find container \"b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367\": container with ID starting with b48e75fa15cb3516931ba7f0815ecb77baca90b99402ebd828e09e1d9bc3c367 not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.533983 4612 scope.go:117] "RemoveContainer" containerID="d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.535093 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.557117 4612 scope.go:117] "RemoveContainer" containerID="8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.561396 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.575088 4612 scope.go:117] "RemoveContainer" containerID="6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.599980 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca\") pod \"0a9d1713-389c-4010-b725-3e51fbd8750d\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600036 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzlgf\" (UniqueName: \"kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf\") pod \"0a9d1713-389c-4010-b725-3e51fbd8750d\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600090 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics\") pod \"0a9d1713-389c-4010-b725-3e51fbd8750d\" (UID: \"0a9d1713-389c-4010-b725-3e51fbd8750d\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600143 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54\") pod \"75a8ee2e-4160-4458-9107-9d2a276edb94\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600170 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities\") pod \"75a8ee2e-4160-4458-9107-9d2a276edb94\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600202 4612 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content\") pod \"75a8ee2e-4160-4458-9107-9d2a276edb94\" (UID: \"75a8ee2e-4160-4458-9107-9d2a276edb94\") " Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.600664 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "0a9d1713-389c-4010-b725-3e51fbd8750d" (UID: "0a9d1713-389c-4010-b725-3e51fbd8750d"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.601523 4612 scope.go:117] "RemoveContainer" containerID="d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.602872 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d\": container with ID starting with d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d not found: ID does not exist" containerID="d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.602930 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d"} err="failed to get container status \"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d\": rpc error: code = NotFound desc = could not find container \"d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d\": container with ID starting with d72af35cb93267dc65cb78da2cd510f41f4161d30c1b49ccc876e4ffde00951d not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.603017 4612 scope.go:117] "RemoveContainer" containerID="8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.603605 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af\": container with ID starting with 8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af not found: ID does not exist" containerID="8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.603653 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af"} err="failed to get container status \"8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af\": rpc error: code = NotFound desc = could not find container \"8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af\": container with ID starting with 8c4dcf81f2355e7099daa32f62d57c5530aa37292b6e7e1412b4c284b4d637af not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.603676 4612 scope.go:117] "RemoveContainer" containerID="6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53" Dec 03 07:31:06 crc kubenswrapper[4612]: E1203 07:31:06.604322 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53\": container with ID starting with 6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53 not found: ID does not exist" containerID="6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.604406 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53"} err="failed to get container status \"6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53\": rpc error: code = NotFound desc = could not find container \"6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53\": container with ID starting with 6be905078a93db5ab54607f87465c07ba75a93c3051f6dd2bb11d819f282aa53 not found: ID does not exist" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.604808 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54" (OuterVolumeSpecName: "kube-api-access-msg54") pod "75a8ee2e-4160-4458-9107-9d2a276edb94" (UID: "75a8ee2e-4160-4458-9107-9d2a276edb94"). InnerVolumeSpecName "kube-api-access-msg54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.605474 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf" (OuterVolumeSpecName: "kube-api-access-nzlgf") pod "0a9d1713-389c-4010-b725-3e51fbd8750d" (UID: "0a9d1713-389c-4010-b725-3e51fbd8750d"). InnerVolumeSpecName "kube-api-access-nzlgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.606467 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities" (OuterVolumeSpecName: "utilities") pod "75a8ee2e-4160-4458-9107-9d2a276edb94" (UID: "75a8ee2e-4160-4458-9107-9d2a276edb94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.608029 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "0a9d1713-389c-4010-b725-3e51fbd8750d" (UID: "0a9d1713-389c-4010-b725-3e51fbd8750d"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.683554 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "75a8ee2e-4160-4458-9107-9d2a276edb94" (UID: "75a8ee2e-4160-4458-9107-9d2a276edb94"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701460 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/75a8ee2e-4160-4458-9107-9d2a276edb94-kube-api-access-msg54\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701485 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701495 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75a8ee2e-4160-4458-9107-9d2a276edb94-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701504 4612 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701570 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzlgf\" (UniqueName: \"kubernetes.io/projected/0a9d1713-389c-4010-b725-3e51fbd8750d-kube-api-access-nzlgf\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.701580 4612 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0a9d1713-389c-4010-b725-3e51fbd8750d-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:06 crc kubenswrapper[4612]: I1203 07:31:06.703386 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x2kxn"] Dec 03 07:31:06 crc kubenswrapper[4612]: W1203 07:31:06.709110 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4db345c6_2ee2_4acf_9be4_a705bddb07fe.slice/crio-98fef18da964906bcd2246b3c066be4ddacd75b7c34037a3bd117a7b5993a871 WatchSource:0}: Error finding container 98fef18da964906bcd2246b3c066be4ddacd75b7c34037a3bd117a7b5993a871: Status 404 returned error can't find the container with id 98fef18da964906bcd2246b3c066be4ddacd75b7c34037a3bd117a7b5993a871 Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.098915 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" path="/var/lib/kubelet/pods/0f3dde25-37e5-4b10-95f4-18e573b0e5a4/volumes" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.100019 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" path="/var/lib/kubelet/pods/3625877f-a938-482d-90ed-f5fce8ef232d/volumes" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.100724 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" path="/var/lib/kubelet/pods/faf2b695-863f-4f58-b53a-66aa4addd381/volumes" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.273858 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-88rgv" event={"ID":"75a8ee2e-4160-4458-9107-9d2a276edb94","Type":"ContainerDied","Data":"03ad03c541f45ad0d81c9fef88ceb2064c7514ff372814792408ebf5c30ae762"} Dec 03 07:31:07 crc 
kubenswrapper[4612]: I1203 07:31:07.274266 4612 scope.go:117] "RemoveContainer" containerID="a729c8fccede3517017f78cdf57263cd8c7a9157ba1af26b4040df01ccac15cc" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.273923 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-88rgv" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.288884 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nj5zt" event={"ID":"cdd62db3-0bae-4037-915e-a339d0ecc7a6","Type":"ContainerDied","Data":"5548a8291416cdb58901d36360bced8fb53d1d39f5b312f95f419138558d73f5"} Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.289236 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nj5zt" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.303559 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" event={"ID":"0a9d1713-389c-4010-b725-3e51fbd8750d","Type":"ContainerDied","Data":"519239fab7f029861b009876fff0f14082ecf20e9afe0e395fb28c500040b1c0"} Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.304029 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r42pw" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307164 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-npj5l"] Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307368 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307382 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307394 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307401 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307410 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307418 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307430 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307439 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307450 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307458 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" 
containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307469 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307475 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307486 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307493 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307504 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307512 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307520 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307526 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307538 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307546 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307556 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307563 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307573 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307580 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307590 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307597 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307606 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307613 4612 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307623 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307629 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="extract-content" Dec 03 07:31:07 crc kubenswrapper[4612]: E1203 07:31:07.307638 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307646 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="extract-utilities" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307748 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307759 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" containerName="marketplace-operator" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307827 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3dde25-37e5-4b10-95f4-18e573b0e5a4" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307839 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="faf2b695-863f-4f58-b53a-66aa4addd381" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307849 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3625877f-a938-482d-90ed-f5fce8ef232d" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.307864 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" containerName="registry-server" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.308676 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.310191 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.318984 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.326171 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-88rgv"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.328015 4612 scope.go:117] "RemoveContainer" containerID="d2ae56b720c170f690e49608786b48e04602c80dca4cd8b3096c4c9adccd5cbf" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.339714 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-npj5l"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.339758 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.339773 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" event={"ID":"4db345c6-2ee2-4acf-9be4-a705bddb07fe","Type":"ContainerStarted","Data":"8856df8dd3cfe3ef37185659d1c64daa511e14e6556035f19e3bf5ada496695c"} Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.339790 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" event={"ID":"4db345c6-2ee2-4acf-9be4-a705bddb07fe","Type":"ContainerStarted","Data":"98fef18da964906bcd2246b3c066be4ddacd75b7c34037a3bd117a7b5993a871"} Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.346851 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.350804 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.363566 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nj5zt"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.374197 4612 scope.go:117] "RemoveContainer" containerID="d2f2c99e8092bcdceebb3ecaa604de1a372736e3577dd7f208cd44425eed8a24" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.377139 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.381111 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r42pw"] Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.383522 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-x2kxn" podStartSLOduration=2.383506697 podStartE2EDuration="2.383506697s" podCreationTimestamp="2025-12-03 07:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:31:07.381353962 +0000 UTC m=+230.554711362" watchObservedRunningTime="2025-12-03 07:31:07.383506697 +0000 UTC m=+230.556864097" Dec 03 07:31:07 crc 
kubenswrapper[4612]: I1203 07:31:07.411328 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-utilities\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.411635 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmv5p\" (UniqueName: \"kubernetes.io/projected/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-kube-api-access-nmv5p\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.411817 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-catalog-content\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.413418 4612 scope.go:117] "RemoveContainer" containerID="a8a717c788fa8cfcb589a3d6bb68cf35739f6ed6f1ce3772f1e1b0df56a56e33" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.424112 4612 scope.go:117] "RemoveContainer" containerID="cc34f41af25e403e47f8f2ad8c21f3f47a474be7ccdb9e397fcf472a4261ddc4" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.435514 4612 scope.go:117] "RemoveContainer" containerID="d9cdc12edf3c858b6fe7977e958de097ad6ecc5004403a725a3fa87dbdaa375a" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.458004 4612 scope.go:117] "RemoveContainer" containerID="eb3034456614a776c289093a9ad443a76c270c47c063bf5c547161b614a498c2" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.512880 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-utilities\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.512933 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmv5p\" (UniqueName: \"kubernetes.io/projected/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-kube-api-access-nmv5p\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.513010 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-catalog-content\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.513477 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-catalog-content\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:07 crc kubenswrapper[4612]: 
Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.513646 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-utilities\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l"
Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.528543 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmv5p\" (UniqueName: \"kubernetes.io/projected/ecf12e7f-21e7-40f0-bdb4-e07c8437cef8-kube-api-access-nmv5p\") pod \"redhat-marketplace-npj5l\" (UID: \"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8\") " pod="openshift-marketplace/redhat-marketplace-npj5l"
Dec 03 07:31:07 crc kubenswrapper[4612]: I1203 07:31:07.664806 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-npj5l"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.053591 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-npj5l"]
Dec 03 07:31:08 crc kubenswrapper[4612]: W1203 07:31:08.061715 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecf12e7f_21e7_40f0_bdb4_e07c8437cef8.slice/crio-95f4dfd4b8b611e53687d5d2865934c274a6aa2738933040345ddebccb4a1e34 WatchSource:0}: Error finding container 95f4dfd4b8b611e53687d5d2865934c274a6aa2738933040345ddebccb4a1e34: Status 404 returned error can't find the container with id 95f4dfd4b8b611e53687d5d2865934c274a6aa2738933040345ddebccb4a1e34
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.294545 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x4zn7"]
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.295847 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.298556 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.301807 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4zn7"]
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.322993 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-catalog-content\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.323030 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-utilities\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.323056 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2l2j\" (UniqueName: \"kubernetes.io/projected/c1ce7b50-2a84-44dc-9398-24bc9f03f745-kube-api-access-g2l2j\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.348638 4612 generic.go:334] "Generic (PLEG): container finished" podID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" containerID="844b1ce499a96b821d9dc418292677a84a5576ffea12345b9477b01adc921979" exitCode=0
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.348732 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-npj5l" event={"ID":"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8","Type":"ContainerDied","Data":"844b1ce499a96b821d9dc418292677a84a5576ffea12345b9477b01adc921979"}
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.348765 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-npj5l" event={"ID":"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8","Type":"ContainerStarted","Data":"95f4dfd4b8b611e53687d5d2865934c274a6aa2738933040345ddebccb4a1e34"}
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.424048 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-catalog-content\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.424093 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-utilities\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7"
(UniqueName: \"kubernetes.io/projected/c1ce7b50-2a84-44dc-9398-24bc9f03f745-kube-api-access-g2l2j\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.424617 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-utilities\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.424738 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ce7b50-2a84-44dc-9398-24bc9f03f745-catalog-content\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.449322 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2l2j\" (UniqueName: \"kubernetes.io/projected/c1ce7b50-2a84-44dc-9398-24bc9f03f745-kube-api-access-g2l2j\") pod \"community-operators-x4zn7\" (UID: \"c1ce7b50-2a84-44dc-9398-24bc9f03f745\") " pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:08 crc kubenswrapper[4612]: I1203 07:31:08.673310 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.056361 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4zn7"] Dec 03 07:31:09 crc kubenswrapper[4612]: W1203 07:31:09.069236 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1ce7b50_2a84_44dc_9398_24bc9f03f745.slice/crio-974665d7afdf9a4a7169a88f681ea476de44af25250c6cb48d7838e04454fb08 WatchSource:0}: Error finding container 974665d7afdf9a4a7169a88f681ea476de44af25250c6cb48d7838e04454fb08: Status 404 returned error can't find the container with id 974665d7afdf9a4a7169a88f681ea476de44af25250c6cb48d7838e04454fb08 Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.105181 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a9d1713-389c-4010-b725-3e51fbd8750d" path="/var/lib/kubelet/pods/0a9d1713-389c-4010-b725-3e51fbd8750d/volumes" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.106003 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75a8ee2e-4160-4458-9107-9d2a276edb94" path="/var/lib/kubelet/pods/75a8ee2e-4160-4458-9107-9d2a276edb94/volumes" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.106588 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdd62db3-0bae-4037-915e-a339d0ecc7a6" path="/var/lib/kubelet/pods/cdd62db3-0bae-4037-915e-a339d0ecc7a6/volumes" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.360417 4612 generic.go:334] "Generic (PLEG): container finished" podID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" containerID="7af0ad7be24d71efc2cd1d00dca0fbbb4cbb10532d4b229caaae3deeb3db97d0" exitCode=0 Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.360529 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-npj5l" 
event={"ID":"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8","Type":"ContainerDied","Data":"7af0ad7be24d71efc2cd1d00dca0fbbb4cbb10532d4b229caaae3deeb3db97d0"} Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.363757 4612 generic.go:334] "Generic (PLEG): container finished" podID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" containerID="3595ca0d866710adae369ea4e3b0a270d05ed0d5a831a92b425cc427493dc6ad" exitCode=0 Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.364389 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zn7" event={"ID":"c1ce7b50-2a84-44dc-9398-24bc9f03f745","Type":"ContainerDied","Data":"3595ca0d866710adae369ea4e3b0a270d05ed0d5a831a92b425cc427493dc6ad"} Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.364452 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zn7" event={"ID":"c1ce7b50-2a84-44dc-9398-24bc9f03f745","Type":"ContainerStarted","Data":"974665d7afdf9a4a7169a88f681ea476de44af25250c6cb48d7838e04454fb08"} Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.696319 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qbglv"] Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.697579 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.700035 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.705033 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qbglv"] Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.737767 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-catalog-content\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.737862 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fklw\" (UniqueName: \"kubernetes.io/projected/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-kube-api-access-5fklw\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.737886 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-utilities\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.839324 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fklw\" (UniqueName: \"kubernetes.io/projected/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-kube-api-access-5fklw\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.839368 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-utilities\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.839423 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-catalog-content\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.839817 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-catalog-content\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.839972 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-utilities\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:09 crc kubenswrapper[4612]: I1203 07:31:09.859892 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fklw\" (UniqueName: \"kubernetes.io/projected/a3e32e0f-ff72-43ff-8afb-54fbf1be823a-kube-api-access-5fklw\") pod \"redhat-operators-qbglv\" (UID: \"a3e32e0f-ff72-43ff-8afb-54fbf1be823a\") " pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.030790 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.225467 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qbglv"] Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.370568 4612 generic.go:334] "Generic (PLEG): container finished" podID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" containerID="575d1fb1361e3f401b637300eb522e9ea4219a5220d8e4a40aaa2ccc838af681" exitCode=0 Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.370647 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zn7" event={"ID":"c1ce7b50-2a84-44dc-9398-24bc9f03f745","Type":"ContainerDied","Data":"575d1fb1361e3f401b637300eb522e9ea4219a5220d8e4a40aaa2ccc838af681"} Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.373691 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerStarted","Data":"3501eaed0d9d51452dafce9e6b42323d560a8b9cd466015b848aa3fb036751bf"} Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.373717 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerStarted","Data":"ed4e3604f7d7f8988ac50c55dccdf32e6397177f41d90eb2bdc80bb796e71c27"} Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.382696 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-npj5l" event={"ID":"ecf12e7f-21e7-40f0-bdb4-e07c8437cef8","Type":"ContainerStarted","Data":"129af57148a9e67ce59dfcdf5c1a54d47557c4380adc51666e0fdd11d8adc528"} Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.432834 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-npj5l" podStartSLOduration=1.8358267590000001 podStartE2EDuration="3.432816179s" podCreationTimestamp="2025-12-03 07:31:07 +0000 UTC" firstStartedPulling="2025-12-03 07:31:08.350845806 +0000 UTC m=+231.524203206" lastFinishedPulling="2025-12-03 07:31:09.947835226 +0000 UTC m=+233.121192626" observedRunningTime="2025-12-03 07:31:10.431710861 +0000 UTC m=+233.605068261" watchObservedRunningTime="2025-12-03 07:31:10.432816179 +0000 UTC m=+233.606173579" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.694316 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.695531 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.698721 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.745752 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.751290 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.751343 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpg9l\" (UniqueName: \"kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.751407 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.852581 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpg9l\" (UniqueName: \"kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.852693 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.852733 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.853250 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.853328 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities\") pod \"certified-operators-6hqst\" (UID: 
\"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:10 crc kubenswrapper[4612]: I1203 07:31:10.873447 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpg9l\" (UniqueName: \"kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l\") pod \"certified-operators-6hqst\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.009524 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.390424 4612 generic.go:334] "Generic (PLEG): container finished" podID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" containerID="3501eaed0d9d51452dafce9e6b42323d560a8b9cd466015b848aa3fb036751bf" exitCode=0 Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.390470 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerDied","Data":"3501eaed0d9d51452dafce9e6b42323d560a8b9cd466015b848aa3fb036751bf"} Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.396904 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4zn7" event={"ID":"c1ce7b50-2a84-44dc-9398-24bc9f03f745","Type":"ContainerStarted","Data":"f61ef6d82d6e05ab468bd23346f2524ef9f46fdefa795cee7bd90f480280c847"} Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.438078 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x4zn7" podStartSLOduration=1.841009501 podStartE2EDuration="3.438059484s" podCreationTimestamp="2025-12-03 07:31:08 +0000 UTC" firstStartedPulling="2025-12-03 07:31:09.368507437 +0000 UTC m=+232.541864837" lastFinishedPulling="2025-12-03 07:31:10.96555742 +0000 UTC m=+234.138914820" observedRunningTime="2025-12-03 07:31:11.434188606 +0000 UTC m=+234.607546026" watchObservedRunningTime="2025-12-03 07:31:11.438059484 +0000 UTC m=+234.611416884" Dec 03 07:31:11 crc kubenswrapper[4612]: I1203 07:31:11.487833 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:31:12 crc kubenswrapper[4612]: I1203 07:31:12.410958 4612 generic.go:334] "Generic (PLEG): container finished" podID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerID="27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865" exitCode=0 Dec 03 07:31:12 crc kubenswrapper[4612]: I1203 07:31:12.411097 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerDied","Data":"27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865"} Dec 03 07:31:12 crc kubenswrapper[4612]: I1203 07:31:12.411479 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerStarted","Data":"8a8795cb491aba6bc65bfa8e9b6b94ca58864a353228b11dc8c82cbc5b31a4aa"} Dec 03 07:31:12 crc kubenswrapper[4612]: I1203 07:31:12.414046 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" 
event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerStarted","Data":"56c6e03731a46755495b0e2a82b0a3428460bc1a419f7b55b86417265d80273a"} Dec 03 07:31:13 crc kubenswrapper[4612]: I1203 07:31:13.419800 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerStarted","Data":"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3"} Dec 03 07:31:13 crc kubenswrapper[4612]: I1203 07:31:13.421513 4612 generic.go:334] "Generic (PLEG): container finished" podID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" containerID="56c6e03731a46755495b0e2a82b0a3428460bc1a419f7b55b86417265d80273a" exitCode=0 Dec 03 07:31:13 crc kubenswrapper[4612]: I1203 07:31:13.421564 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerDied","Data":"56c6e03731a46755495b0e2a82b0a3428460bc1a419f7b55b86417265d80273a"} Dec 03 07:31:14 crc kubenswrapper[4612]: I1203 07:31:14.429591 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qbglv" event={"ID":"a3e32e0f-ff72-43ff-8afb-54fbf1be823a","Type":"ContainerStarted","Data":"73bab05f3485e50994499b262e7ba2cd12a7e0859066838fe337697b91a4573e"} Dec 03 07:31:14 crc kubenswrapper[4612]: I1203 07:31:14.433038 4612 generic.go:334] "Generic (PLEG): container finished" podID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerID="61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3" exitCode=0 Dec 03 07:31:14 crc kubenswrapper[4612]: I1203 07:31:14.433086 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerDied","Data":"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3"} Dec 03 07:31:14 crc kubenswrapper[4612]: I1203 07:31:14.454178 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qbglv" podStartSLOduration=2.711657474 podStartE2EDuration="5.454159729s" podCreationTimestamp="2025-12-03 07:31:09 +0000 UTC" firstStartedPulling="2025-12-03 07:31:11.392842371 +0000 UTC m=+234.566199771" lastFinishedPulling="2025-12-03 07:31:14.135344626 +0000 UTC m=+237.308702026" observedRunningTime="2025-12-03 07:31:14.45380775 +0000 UTC m=+237.627165150" watchObservedRunningTime="2025-12-03 07:31:14.454159729 +0000 UTC m=+237.627517249" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.641597 4612 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.642531 4612 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.642684 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.642860 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40" gracePeriod=15 Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.642876 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a" gracePeriod=15 Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.642968 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278" gracePeriod=15 Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643023 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9" gracePeriod=15 Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643249 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c" gracePeriod=15 Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643340 4612 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643571 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643591 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643607 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643614 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643634 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643639 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643645 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643651 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643658 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643664 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643673 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643678 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643765 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643776 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643781 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643789 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643799 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643807 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: E1203 07:31:15.643907 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.643915 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725089 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725143 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725164 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725205 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725237 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725251 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725286 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.725321 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826341 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826385 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826405 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826425 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826447 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826463 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826496 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826526 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826593 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826628 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826648 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826667 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826685 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826703 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826721 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:15 crc kubenswrapper[4612]: I1203 07:31:15.826742 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.444923 4612 generic.go:334] "Generic (PLEG): container finished" podID="010793a3-88fd-4772-bde7-ef4a17d40ba3" containerID="f76d67dba8ce8d2d26e049778f1cd7ffa51940892b09cf7f60c7d144654908e1" exitCode=0 Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.445027 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"010793a3-88fd-4772-bde7-ef4a17d40ba3","Type":"ContainerDied","Data":"f76d67dba8ce8d2d26e049778f1cd7ffa51940892b09cf7f60c7d144654908e1"} Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.446056 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.446455 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.448166 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.449893 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.450722 4612 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a" exitCode=0 Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.450752 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9" exitCode=0 Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.450764 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c" exitCode=0 Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.450775 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278" exitCode=2 Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.450819 4612 scope.go:117] "RemoveContainer" containerID="f66eb4f20d308af4d9679961c83a7a6874664bfa0d2b1b758b906b3ccf333b47" Dec 03 07:31:16 crc kubenswrapper[4612]: E1203 07:31:16.838601 4612 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.144:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:16 crc kubenswrapper[4612]: I1203 07:31:16.839058 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:16 crc kubenswrapper[4612]: W1203 07:31:16.857801 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-c6aa996aa59a9eb26df5b33e5a8392d620459df55b286e601c01763fd1d264a4 WatchSource:0}: Error finding container c6aa996aa59a9eb26df5b33e5a8392d620459df55b286e601c01763fd1d264a4: Status 404 returned error can't find the container with id c6aa996aa59a9eb26df5b33e5a8392d620459df55b286e601c01763fd1d264a4 Dec 03 07:31:16 crc kubenswrapper[4612]: E1203 07:31:16.862308 4612 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.144:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187da41dcabc104a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 07:31:16.861431882 +0000 UTC m=+240.034789272,LastTimestamp:2025-12-03 07:31:16.861431882 +0000 UTC m=+240.034789272,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.095435 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.096252 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.461231 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.468300 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerStarted","Data":"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f"} Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.469189 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.469613 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.470446 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7"} Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.470489 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c6aa996aa59a9eb26df5b33e5a8392d620459df55b286e601c01763fd1d264a4"} Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.471263 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: E1203 07:31:17.471338 4612 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.144:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.472038 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" 
pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.665742 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.665799 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.748276 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.748990 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.749455 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.749671 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.802562 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.802865 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.803040 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.803179 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.954785 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access\") pod \"010793a3-88fd-4772-bde7-ef4a17d40ba3\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955213 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock\") pod \"010793a3-88fd-4772-bde7-ef4a17d40ba3\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955241 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir\") pod \"010793a3-88fd-4772-bde7-ef4a17d40ba3\" (UID: \"010793a3-88fd-4772-bde7-ef4a17d40ba3\") " Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955259 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock" (OuterVolumeSpecName: "var-lock") pod "010793a3-88fd-4772-bde7-ef4a17d40ba3" (UID: "010793a3-88fd-4772-bde7-ef4a17d40ba3"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955372 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "010793a3-88fd-4772-bde7-ef4a17d40ba3" (UID: "010793a3-88fd-4772-bde7-ef4a17d40ba3"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955667 4612 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.955679 4612 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/010793a3-88fd-4772-bde7-ef4a17d40ba3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:17 crc kubenswrapper[4612]: I1203 07:31:17.965193 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "010793a3-88fd-4772-bde7-ef4a17d40ba3" (UID: "010793a3-88fd-4772-bde7-ef4a17d40ba3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.056754 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/010793a3-88fd-4772-bde7-ef4a17d40ba3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.481931 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"010793a3-88fd-4772-bde7-ef4a17d40ba3","Type":"ContainerDied","Data":"f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b"} Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.482000 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f80d89e784507093c340fd5e06cdc31ce76ff188af6175638af9ad1eb51ebd6b" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.482085 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.487210 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.488070 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40" exitCode=0 Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.498228 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.498993 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.499291 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.548875 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-npj5l" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.549539 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.549794 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.550124 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.674924 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.674990 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x4zn7" Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.722197 4612 
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.722197 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.722658 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.723876 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.724192 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.724585 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.815478 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.816869 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.817464 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.817843 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.818066 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.818252 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.818430 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.967577 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.967676 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.967699 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.967994 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.968025 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 07:31:18 crc kubenswrapper[4612]: I1203 07:31:18.968040 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.069141 4612 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.069172 4612 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.069183 4612 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.100656 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.494991 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.496098 4612 scope.go:117] "RemoveContainer" containerID="cc22b6b17c6ac315b728c118c8b0eb5159c7b676d44bc0649c0d90a7288d7d0a"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.496328 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.497363 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.497619 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.498355 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.498702 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.498913 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.503848 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.504415 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.504784 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.505034 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.505209 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.513235 4612 scope.go:117] "RemoveContainer" containerID="c43085908af81c3ee404052a3c26b0242ca5b058a94b881ef39f8163e5f34fb9"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.542492 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x4zn7"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.543458 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.544149 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.544281 4612 scope.go:117] "RemoveContainer" containerID="dd10b2e6fb8d3f1cc1c8258bc90b26bcd3cdd1b8555e96721b487de0672f059c"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.544455 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.544722 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.545072 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.555144 4612 scope.go:117] "RemoveContainer" containerID="1374ef43ccd3fd79da72f56cfb380e7865ccb31dd46c4b234706248c03a44278"
Dec 03 07:31:19 crc kubenswrapper[4612]: I1203 07:31:19.568205 4612 scope.go:117] "RemoveContainer" containerID="3399eb5a3b96e6184f706a76ffa0df9c324be2713cddf86fe0143606e24bab40"
containerID="73f1b7dc0ad2947ed7f825a08e25e00552959ca6d81786b9d1636d7579ac6346" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.032370 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.032437 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.075089 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.075619 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.075858 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.076331 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.077062 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.077656 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.077988 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.547123 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qbglv" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.547562 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: 
connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.547892 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.548305 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.548588 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.548852 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:20 crc kubenswrapper[4612]: I1203 07:31:20.549150 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.010747 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.011561 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.058042 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.058477 4612 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.058825 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.059193 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.059482 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.059865 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.060320 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: E1203 07:31:21.090638 4612 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.144:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187da41dcabc104a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 07:31:16.861431882 +0000 UTC m=+240.034789272,LastTimestamp:2025-12-03 07:31:16.861431882 +0000 UTC m=+240.034789272,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.547365 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.547806 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.548253 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused" Dec 03 
Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.548931 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.549182 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:21 crc kubenswrapper[4612]: I1203 07:31:21.549477 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.206319 4612 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.206529 4612 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.206753 4612 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.206981 4612 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.207144 4612 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:22 crc kubenswrapper[4612]: I1203 07:31:22.207162 4612 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.207296 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="200ms"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.408584 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="400ms"
Dec 03 07:31:22 crc kubenswrapper[4612]: E1203 07:31:22.810473 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="800ms"
Dec 03 07:31:23 crc kubenswrapper[4612]: E1203 07:31:23.612360 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="1.6s"
Dec 03 07:31:25 crc kubenswrapper[4612]: E1203 07:31:25.212850 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="3.2s"
Dec 03 07:31:27 crc kubenswrapper[4612]: I1203 07:31:27.091971 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:27 crc kubenswrapper[4612]: I1203 07:31:27.092828 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:27 crc kubenswrapper[4612]: I1203 07:31:27.093170 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:27 crc kubenswrapper[4612]: I1203 07:31:27.093495 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:27 crc kubenswrapper[4612]: I1203 07:31:27.093768 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:28 crc kubenswrapper[4612]: E1203 07:31:28.413353 4612 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.144:6443: connect: connection refused" interval="6.4s"
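[Editor's note: the lease retry intervals above double on each failure, 200ms, 400ms, 800ms, 1.6s, 3.2s, 6.4s. A minimal sketch of that doubling backoff follows; the 7s cap is an assumption for illustration, not taken from this log.]

    // Hypothetical sketch of the doubling retry interval seen in the
    // "Failed to ensure lease exists, will retry" entries above. The
    // maxInterval cap is an assumed value, not from the log.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        interval := 200 * time.Millisecond
        const maxInterval = 7 * time.Second
        for i := 0; i < 6; i++ {
            fmt.Printf("Failed to ensure lease exists, will retry interval=%v\n", interval)
            interval *= 2 // double after each failed attempt
            if interval > maxInterval {
                interval = maxInterval // clamp so retries never back off forever
            }
        }
    }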
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.089102 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.090349 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.090850 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.091299 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.091596 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.091890 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.115028 4612 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea"
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.115069 4612 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea"
Dec 03 07:31:29 crc kubenswrapper[4612]: E1203 07:31:29.115522 4612 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
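[Editor's note: the mirror_client entries above show the kubelet deleting the stale mirror pod for the static kube-apiserver-crc pod (API UID 5bb6f136-2fb9-4002-ad0d-206b8e43c6ea, while the new static pod runs as 71bb4a3aecc4ba5b26c4b7318770ce13), so a matching mirror can be re-created; with the API server refusing connections, both the delete here and the earlier "Failed creating a mirror pod" attempts fail and are simply retried on later syncs. A hedged sketch of that delete-then-recreate flow, with an assumed client interface rather than the kubelet's mirror_client API:]

    // Hypothetical sketch of mirror-pod handling: a static pod defined
    // by an on-disk manifest is represented in the API server by a
    // "mirror" pod; a stale mirror (mismatched UID) is deleted and then
    // re-created. The interface below is an illustrative assumption.
    package main

    import "fmt"

    type mirrorClient interface {
        Delete(name string) error
        Create(name string) error
    }

    func syncMirror(c mirrorClient, name, staticUID, mirrorUID string) {
        if mirrorUID != "" && mirrorUID != staticUID {
            fmt.Println("Trying to delete pod", name)
            if err := c.Delete(name); err != nil {
                // With the API server down this fails, as in the log,
                // and the kubelet retries on a later sync.
                fmt.Println("Failed deleting a mirror pod:", err)
                return
            }
        }
        if err := c.Create(name); err != nil {
            fmt.Println("Failed creating a mirror pod for", name)
        }
    }

    // downClient simulates an unreachable API server.
    type downClient struct{}

    func (downClient) Delete(string) error { return fmt.Errorf("connect: connection refused") }
    func (downClient) Create(string) error { return fmt.Errorf("connect: connection refused") }

    func main() {
        syncMirror(downClient{}, "kube-apiserver-crc",
            "71bb4a3aecc4ba5b26c4b7318770ce13", "5bb6f136-2fb9-4002-ad0d-206b8e43c6ea")
    }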
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.116020 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:31:29 crc kubenswrapper[4612]: W1203 07:31:29.141511 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-5a22a7c3a8fad2817ce6c75a18f672135d563d34cbd687e170e002a43bb46c77 WatchSource:0}: Error finding container 5a22a7c3a8fad2817ce6c75a18f672135d563d34cbd687e170e002a43bb46c77: Status 404 returned error can't find the container with id 5a22a7c3a8fad2817ce6c75a18f672135d563d34cbd687e170e002a43bb46c77
Dec 03 07:31:29 crc kubenswrapper[4612]: I1203 07:31:29.548869 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5a22a7c3a8fad2817ce6c75a18f672135d563d34cbd687e170e002a43bb46c77"}
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.556644 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.556703 4612 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3" exitCode=1
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.556735 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3"}
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.557562 4612 scope.go:117] "RemoveContainer" containerID="82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.557977 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.558422 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.558830 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.559079 4612 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.559360 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.559613 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.559928 4612 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="7411db088e9a08a5b85c7466fb6511a1b9e9a4fce4a3cfef02fe24b413f231ed" exitCode=0
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.559974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"7411db088e9a08a5b85c7466fb6511a1b9e9a4fce4a3cfef02fe24b413f231ed"}
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.560182 4612 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.560196 4612 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea"
Dec 03 07:31:30 crc kubenswrapper[4612]: E1203 07:31:30.560584 4612 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.144:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.560635 4612 status_manager.go:851] "Failed to get status for pod" podUID="c1ce7b50-2a84-44dc-9398-24bc9f03f745" pod="openshift-marketplace/community-operators-x4zn7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-x4zn7\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.561068 4612 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.561333 4612 status_manager.go:851] "Failed to get status for pod" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.561664 4612 status_manager.go:851] "Failed to get status for pod" podUID="a3e32e0f-ff72-43ff-8afb-54fbf1be823a" pod="openshift-marketplace/redhat-operators-qbglv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qbglv\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.562084 4612 status_manager.go:851] "Failed to get status for pod" podUID="ecf12e7f-21e7-40f0-bdb4-e07c8437cef8" pod="openshift-marketplace/redhat-marketplace-npj5l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-npj5l\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.562433 4612 status_manager.go:851] "Failed to get status for pod" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" pod="openshift-marketplace/certified-operators-6hqst" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6hqst\": dial tcp 38.102.83.144:6443: connect: connection refused"
Dec 03 07:31:30 crc kubenswrapper[4612]: I1203 07:31:30.899733 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerName="oauth-openshift" containerID="cri-o://f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64" gracePeriod=15
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.250283 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj"
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.426724 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tlqp\" (UniqueName: \"kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") "
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427094 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") "
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427122 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") "
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427164 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") "
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427202 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") "
Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427222 4612 reconciler_common.go:159]
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427238 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427283 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427303 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427332 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427353 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427375 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427399 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427419 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template\") pod \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\" (UID: \"5fb67397-5138-46d4-9a7f-ec95a9cee2b7\") " Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.427925 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.428279 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.428668 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.428976 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.431101 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.434552 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.435107 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.436232 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.436485 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.436707 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.438660 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp" (OuterVolumeSpecName: "kube-api-access-2tlqp") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "kube-api-access-2tlqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.439097 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.441656 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.441758 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "5fb67397-5138-46d4-9a7f-ec95a9cee2b7" (UID: "5fb67397-5138-46d4-9a7f-ec95a9cee2b7"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528624 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528659 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528677 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tlqp\" (UniqueName: \"kubernetes.io/projected/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-kube-api-access-2tlqp\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528691 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528704 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528717 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528729 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528743 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528755 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528767 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528781 4612 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528795 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528812 4612 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.528826 4612 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5fb67397-5138-46d4-9a7f-ec95a9cee2b7-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.566862 4612 generic.go:334] "Generic (PLEG): container finished" podID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerID="f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64" exitCode=0 Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.566931 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.566909 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" event={"ID":"5fb67397-5138-46d4-9a7f-ec95a9cee2b7","Type":"ContainerDied","Data":"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64"} Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.567005 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tznzj" event={"ID":"5fb67397-5138-46d4-9a7f-ec95a9cee2b7","Type":"ContainerDied","Data":"69f12dcf5b93bdda55cfacf8e41deaa174bbaa3a8c0e434f8f62c1e3a043417c"} Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.567037 4612 scope.go:117] "RemoveContainer" containerID="f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.578657 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"26bbcf2e22ffcf02f5c516ea8a8b51bc364cb668e848a002ba73ab7e4168cfd5"} Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.578715 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9a2358a821e16ff87aa66965ac50135df1fc4f602e0d50f4e45f66d4a518e4b6"} Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.578731 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"83e9119d19279e04f5a50f8d7541ea7923a5575644cdb0ba4183673edffd4ee8"} Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.581401 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.581448 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9269cd54d33cb990fde6c26ec89d56607c1e5650a270f60a2a903184171fc141"} Dec 03 07:31:31 
crc kubenswrapper[4612]: I1203 07:31:31.591536 4612 scope.go:117] "RemoveContainer" containerID="f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64" Dec 03 07:31:31 crc kubenswrapper[4612]: E1203 07:31:31.592077 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64\": container with ID starting with f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64 not found: ID does not exist" containerID="f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64" Dec 03 07:31:31 crc kubenswrapper[4612]: I1203 07:31:31.592114 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64"} err="failed to get container status \"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64\": rpc error: code = NotFound desc = could not find container \"f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64\": container with ID starting with f9837eeee3e08db8dd17ecea628f4ffb237caeb1e91d41d9d028e535ced36f64 not found: ID does not exist" Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.590029 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1180ae7d6faff0940476da6f2b98aa6c6f4a79484caff0a7d46bcdb16cf9581b"} Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.590389 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b6ee478e6e4d9b4a4c0f3af245fa2830910d7e2c4598a8020ad6231cb58f2e6a"} Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.590262 4612 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea" Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.590476 4612 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea" Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.590446 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:32 crc kubenswrapper[4612]: I1203 07:31:32.897668 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:31:34 crc kubenswrapper[4612]: I1203 07:31:34.116762 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:34 crc kubenswrapper[4612]: I1203 07:31:34.116839 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:34 crc kubenswrapper[4612]: I1203 07:31:34.123365 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:36 crc kubenswrapper[4612]: I1203 07:31:36.567829 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:31:36 crc kubenswrapper[4612]: I1203 07:31:36.568104 4612 patch_prober.go:28] interesting pod/kube-controller-manager-crc 
container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 07:31:36 crc kubenswrapper[4612]: I1203 07:31:36.568654 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 07:31:37 crc kubenswrapper[4612]: I1203 07:31:37.603182 4612 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:37 crc kubenswrapper[4612]: I1203 07:31:37.844269 4612 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ea16328c-5dc8-40db-93ba-246a169f594c" Dec 03 07:31:38 crc kubenswrapper[4612]: I1203 07:31:38.619008 4612 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea" Dec 03 07:31:38 crc kubenswrapper[4612]: I1203 07:31:38.619041 4612 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bb6f136-2fb9-4002-ad0d-206b8e43c6ea" Dec 03 07:31:38 crc kubenswrapper[4612]: I1203 07:31:38.622469 4612 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ea16328c-5dc8-40db-93ba-246a169f594c" Dec 03 07:31:46 crc kubenswrapper[4612]: I1203 07:31:46.568731 4612 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 07:31:46 crc kubenswrapper[4612]: I1203 07:31:46.569278 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 07:31:46 crc kubenswrapper[4612]: I1203 07:31:46.913842 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 07:31:47 crc kubenswrapper[4612]: I1203 07:31:47.078708 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 07:31:47 crc kubenswrapper[4612]: I1203 07:31:47.467768 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.267962 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.333550 4612 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.410802 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.412625 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.589572 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 07:31:48 crc kubenswrapper[4612]: I1203 07:31:48.785763 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 07:31:49 crc kubenswrapper[4612]: I1203 07:31:49.111813 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 07:31:49 crc kubenswrapper[4612]: I1203 07:31:49.435180 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 07:31:49 crc kubenswrapper[4612]: I1203 07:31:49.496091 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 07:31:49 crc kubenswrapper[4612]: I1203 07:31:49.733895 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 07:31:49 crc kubenswrapper[4612]: I1203 07:31:49.785857 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.102420 4612 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.143291 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.158656 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.315681 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.358847 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.618169 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.697429 4612 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.697774 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6hqst" podStartSLOduration=37.872653041 podStartE2EDuration="40.697739923s" podCreationTimestamp="2025-12-03 07:31:10 +0000 UTC" firstStartedPulling="2025-12-03 07:31:12.414069644 +0000 UTC m=+235.587427044" lastFinishedPulling="2025-12-03 07:31:15.239156526 +0000 UTC m=+238.412513926" observedRunningTime="2025-12-03 
07:31:37.637283184 +0000 UTC m=+260.810640584" watchObservedRunningTime="2025-12-03 07:31:50.697739923 +0000 UTC m=+273.871097333" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.702263 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tznzj","openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.702315 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.706884 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.708786 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.726033 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.726012541 podStartE2EDuration="13.726012541s" podCreationTimestamp="2025-12-03 07:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:31:50.723218231 +0000 UTC m=+273.896575631" watchObservedRunningTime="2025-12-03 07:31:50.726012541 +0000 UTC m=+273.899369951" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.765213 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 07:31:50 crc kubenswrapper[4612]: I1203 07:31:50.903961 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.095226 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" path="/var/lib/kubelet/pods/5fb67397-5138-46d4-9a7f-ec95a9cee2b7/volumes" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.249490 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.250948 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.251416 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.251748 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.253368 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.340159 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.396247 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.413242 4612 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.451986 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.465397 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.551578 4612 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.568574 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.667489 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.679780 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.854694 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.865779 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 07:31:51 crc kubenswrapper[4612]: I1203 07:31:51.965569 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.112121 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.142566 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.144445 4612 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.170645 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.205910 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.227341 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.296985 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.388930 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.492402 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.503065 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 
07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.508357 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.620215 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.724217 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.808363 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.809552 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.816155 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.912343 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 07:31:52 crc kubenswrapper[4612]: I1203 07:31:52.921510 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.045875 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.099806 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.157553 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.160226 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.190355 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.263268 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.489863 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.489895 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.548397 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.578376 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.588898 4612 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"cni-copy-resources" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.760566 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.780019 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.813467 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.879120 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 07:31:53 crc kubenswrapper[4612]: I1203 07:31:53.985400 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.020222 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.022386 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.081730 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.095242 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.175144 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.195036 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.205258 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.223167 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.384352 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.423412 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.451032 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.459430 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.616702 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.788607 4612 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.806756 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.807566 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.857602 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.891185 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.918059 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.941402 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.945234 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 07:31:54 crc kubenswrapper[4612]: I1203 07:31:54.971150 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.016701 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.093227 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.123609 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.165357 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.193842 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.254424 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.300342 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.371175 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.412222 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.464472 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.482571 4612 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.490660 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.624455 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.660279 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.714025 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.743188 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.751933 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.788613 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 07:31:55 crc kubenswrapper[4612]: I1203 07:31:55.821261 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.000126 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.045401 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.052607 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.100558 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.109257 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.143730 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.204587 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.210561 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.248765 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.250195 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 07:31:56 crc 
kubenswrapper[4612]: I1203 07:31:56.265483 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.301359 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.348203 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.406365 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.501503 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.538979 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.564323 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.568058 4612 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.568121 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.568182 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.569014 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"9269cd54d33cb990fde6c26ec89d56607c1e5650a270f60a2a903184171fc141"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.569152 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://9269cd54d33cb990fde6c26ec89d56607c1e5650a270f60a2a903184171fc141" gracePeriod=30 Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.634298 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.776085 4612 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.891444 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 07:31:56 crc kubenswrapper[4612]: I1203 07:31:56.918094 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.001320 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.119902 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.123544 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.149306 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.268855 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.295633 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-546468998b-rd678"] Dec 03 07:31:57 crc kubenswrapper[4612]: E1203 07:31:57.295826 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerName="oauth-openshift" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.295837 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerName="oauth-openshift" Dec 03 07:31:57 crc kubenswrapper[4612]: E1203 07:31:57.295851 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" containerName="installer" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.295856 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" containerName="installer" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.295966 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb67397-5138-46d4-9a7f-ec95a9cee2b7" containerName="oauth-openshift" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.295977 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="010793a3-88fd-4772-bde7-ef4a17d40ba3" containerName="installer" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.296326 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.298111 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.298111 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.298522 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.300406 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.301613 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.302477 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.302489 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.302536 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.314531 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.315425 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.315838 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.316315 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.317633 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.318296 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.322762 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-546468998b-rd678"] Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.327595 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.334981 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.355996 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-session\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356080 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-dir\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356107 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-error\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356145 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2v76\" (UniqueName: \"kubernetes.io/projected/8be225d1-3e6b-4108-bed6-622a60bc9b85-kube-api-access-x2v76\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356169 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356196 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-policies\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356217 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-cliconfig\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356256 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-login\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356362 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356447 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-service-ca\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356490 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-serving-cert\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356516 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356544 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-router-certs\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356815 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.356611 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.383866 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.456684 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.456882 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457773 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457804 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-router-certs\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457829 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457858 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-session\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457876 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-dir\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457899 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-error\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457924 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2v76\" (UniqueName: \"kubernetes.io/projected/8be225d1-3e6b-4108-bed6-622a60bc9b85-kube-api-access-x2v76\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457960 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.457984 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-policies\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458003 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-cliconfig\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458038 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-login\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458072 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458096 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-service-ca\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458123 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-serving-cert\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.458664 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.459645 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-cliconfig\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.459876 4612 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-service-ca\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.459972 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-dir\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.460012 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8be225d1-3e6b-4108-bed6-622a60bc9b85-audit-policies\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.464153 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.464237 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-serving-cert\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.464281 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.464517 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-router-certs\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.465169 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.465432 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-system-session\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.466067 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-error\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.468339 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8be225d1-3e6b-4108-bed6-622a60bc9b85-v4-0-config-user-template-login\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.475265 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2v76\" (UniqueName: \"kubernetes.io/projected/8be225d1-3e6b-4108-bed6-622a60bc9b85-kube-api-access-x2v76\") pod \"oauth-openshift-546468998b-rd678\" (UID: \"8be225d1-3e6b-4108-bed6-622a60bc9b85\") " pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.520083 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.575114 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.581000 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.586198 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.633835 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.652539 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.822435 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.850936 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.879608 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 07:31:57 crc kubenswrapper[4612]: I1203 07:31:57.953673 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.010713 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.013937 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.018012 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.060612 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.126172 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.193645 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-546468998b-rd678"] Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.216041 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.278201 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.307761 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.308519 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.315854 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.366062 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.450257 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.483927 4612 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.509056 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.591866 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.641178 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.655230 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.718976 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-546468998b-rd678" event={"ID":"8be225d1-3e6b-4108-bed6-622a60bc9b85","Type":"ContainerStarted","Data":"4baac2e713cf9cafa60c4c35e89a508e49850c68a631d22e563604ccdc31d5f6"} Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.719029 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-546468998b-rd678" event={"ID":"8be225d1-3e6b-4108-bed6-622a60bc9b85","Type":"ContainerStarted","Data":"53b75e18c131090234f389b95215255ad85c7e20dc13e4b8aa872853d468ffaf"} Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.720033 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.730515 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.741866 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-546468998b-rd678" podStartSLOduration=53.741853497 podStartE2EDuration="53.741853497s" podCreationTimestamp="2025-12-03 07:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:31:58.740289388 +0000 UTC m=+281.913646798" watchObservedRunningTime="2025-12-03 07:31:58.741853497 +0000 UTC m=+281.915210907" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.918807 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-546468998b-rd678" Dec 03 07:31:58 crc kubenswrapper[4612]: I1203 07:31:58.938794 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.010887 4612 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.011142 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7" gracePeriod=5 Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.043979 4612 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 
07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.102726 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.117284 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.372525 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.418544 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.442455 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.564476 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.599301 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.633147 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.633989 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.664865 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.769560 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.931773 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.935584 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 07:31:59 crc kubenswrapper[4612]: I1203 07:31:59.957279 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.020970 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.045119 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.087014 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.223647 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.283785 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.349756 4612 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.351852 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.406243 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.471827 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.547902 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.587282 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.635750 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.777236 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.796795 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.865852 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.883478 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 07:32:00 crc kubenswrapper[4612]: I1203 07:32:00.997613 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.049905 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.105102 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.161165 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.174316 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.206697 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.215503 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.276466 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 07:32:01 crc 
kubenswrapper[4612]: I1203 07:32:01.302339 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.433660 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.580022 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.613988 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.621104 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.621172 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.661966 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.664600 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.821241 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.923250 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.943256 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.952097 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 07:32:01 crc kubenswrapper[4612]: I1203 07:32:01.972015 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.025800 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.097572 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.239856 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.500353 4612 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.754733 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 07:32:02 crc kubenswrapper[4612]: I1203 07:32:02.903250 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 07:32:03 crc kubenswrapper[4612]: I1203 
07:32:03.703658 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 07:32:03 crc kubenswrapper[4612]: I1203 07:32:03.747464 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.136457 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.136755 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256168 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256224 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256312 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256341 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256326 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256370 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256409 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256435 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256567 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256902 4612 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256925 4612 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256959 4612 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.256975 4612 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.269030 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.358801 4612 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.705709 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.750181 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.750248 4612 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7" exitCode=137 Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.750306 4612 scope.go:117] "RemoveContainer" containerID="4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.750355 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.776633 4612 scope.go:117] "RemoveContainer" containerID="4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7" Dec 03 07:32:04 crc kubenswrapper[4612]: E1203 07:32:04.777197 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7\": container with ID starting with 4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7 not found: ID does not exist" containerID="4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7" Dec 03 07:32:04 crc kubenswrapper[4612]: I1203 07:32:04.777246 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7"} err="failed to get container status \"4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7\": rpc error: code = NotFound desc = could not find container \"4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7\": container with ID starting with 4f4f578590a493b6db5c13acc68646b1546286320c0d3abde30f3028ad5eb8c7 not found: ID does not exist" Dec 03 07:32:05 crc kubenswrapper[4612]: I1203 07:32:05.096464 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 07:32:16 crc kubenswrapper[4612]: I1203 07:32:16.958442 4612 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 03 07:32:24 crc kubenswrapper[4612]: I1203 07:32:24.684287 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 07:32:26 crc kubenswrapper[4612]: I1203 07:32:26.879096 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 07:32:26 crc 
kubenswrapper[4612]: I1203 07:32:26.881103 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 07:32:26 crc kubenswrapper[4612]: I1203 07:32:26.881159 4612 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="9269cd54d33cb990fde6c26ec89d56607c1e5650a270f60a2a903184171fc141" exitCode=137
Dec 03 07:32:26 crc kubenswrapper[4612]: I1203 07:32:26.881194 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"9269cd54d33cb990fde6c26ec89d56607c1e5650a270f60a2a903184171fc141"}
Dec 03 07:32:26 crc kubenswrapper[4612]: I1203 07:32:26.881233 4612 scope.go:117] "RemoveContainer" containerID="82e1dba5b904ac711029aacc495d83e8018b8f374ded09b9fcab1484cebfdff3"
Dec 03 07:32:27 crc kubenswrapper[4612]: I1203 07:32:27.887838 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Dec 03 07:32:27 crc kubenswrapper[4612]: I1203 07:32:27.890045 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9f277d50c8a5abfa4316010825d109e95b4df07b631f07b10320e21a5402f063"}
Dec 03 07:32:32 crc kubenswrapper[4612]: I1203 07:32:32.897930 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:32:36 crc kubenswrapper[4612]: I1203 07:32:36.568617 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:32:36 crc kubenswrapper[4612]: I1203 07:32:36.572508 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:32:40 crc kubenswrapper[4612]: I1203 07:32:40.569590 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 03 07:32:42 crc kubenswrapper[4612]: I1203 07:32:42.902573 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.044106 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"]
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.044715 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" podUID="3cb00e09-7604-4998-9c5e-00f758d2de98" containerName="controller-manager" containerID="cri-o://10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8" gracePeriod=30
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.051555 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"]
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.051782 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" podUID="15002e9b-936b-438d-b53b-dc8764c9dea3" containerName="route-controller-manager" containerID="cri-o://9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f" gracePeriod=30
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.617773 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.622523 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678471 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config\") pod \"3cb00e09-7604-4998-9c5e-00f758d2de98\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678542 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert\") pod \"3cb00e09-7604-4998-9c5e-00f758d2de98\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678563 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert\") pod \"15002e9b-936b-438d-b53b-dc8764c9dea3\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678580 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqs2b\" (UniqueName: \"kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b\") pod \"15002e9b-936b-438d-b53b-dc8764c9dea3\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678602 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles\") pod \"3cb00e09-7604-4998-9c5e-00f758d2de98\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678635 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca\") pod \"15002e9b-936b-438d-b53b-dc8764c9dea3\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678651 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mttc\" (UniqueName: \"kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc\") pod \"3cb00e09-7604-4998-9c5e-00f758d2de98\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678697 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca\") pod \"3cb00e09-7604-4998-9c5e-00f758d2de98\" (UID: \"3cb00e09-7604-4998-9c5e-00f758d2de98\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.678717 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config\") pod \"15002e9b-936b-438d-b53b-dc8764c9dea3\" (UID: \"15002e9b-936b-438d-b53b-dc8764c9dea3\") "
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.679413 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config" (OuterVolumeSpecName: "config") pod "3cb00e09-7604-4998-9c5e-00f758d2de98" (UID: "3cb00e09-7604-4998-9c5e-00f758d2de98"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.679595 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config" (OuterVolumeSpecName: "config") pod "15002e9b-936b-438d-b53b-dc8764c9dea3" (UID: "15002e9b-936b-438d-b53b-dc8764c9dea3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.679705 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca" (OuterVolumeSpecName: "client-ca") pod "15002e9b-936b-438d-b53b-dc8764c9dea3" (UID: "15002e9b-936b-438d-b53b-dc8764c9dea3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.680226 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca" (OuterVolumeSpecName: "client-ca") pod "3cb00e09-7604-4998-9c5e-00f758d2de98" (UID: "3cb00e09-7604-4998-9c5e-00f758d2de98"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.680689 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3cb00e09-7604-4998-9c5e-00f758d2de98" (UID: "3cb00e09-7604-4998-9c5e-00f758d2de98"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.687051 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "15002e9b-936b-438d-b53b-dc8764c9dea3" (UID: "15002e9b-936b-438d-b53b-dc8764c9dea3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.692119 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc" (OuterVolumeSpecName: "kube-api-access-4mttc") pod "3cb00e09-7604-4998-9c5e-00f758d2de98" (UID: "3cb00e09-7604-4998-9c5e-00f758d2de98"). InnerVolumeSpecName "kube-api-access-4mttc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.693155 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b" (OuterVolumeSpecName: "kube-api-access-kqs2b") pod "15002e9b-936b-438d-b53b-dc8764c9dea3" (UID: "15002e9b-936b-438d-b53b-dc8764c9dea3"). InnerVolumeSpecName "kube-api-access-kqs2b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.693481 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3cb00e09-7604-4998-9c5e-00f758d2de98" (UID: "3cb00e09-7604-4998-9c5e-00f758d2de98"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779425 4612 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779460 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mttc\" (UniqueName: \"kubernetes.io/projected/3cb00e09-7604-4998-9c5e-00f758d2de98-kube-api-access-4mttc\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779481 4612 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779490 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15002e9b-936b-438d-b53b-dc8764c9dea3-config\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779503 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-config\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779512 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb00e09-7604-4998-9c5e-00f758d2de98-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779537 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15002e9b-936b-438d-b53b-dc8764c9dea3-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779545 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqs2b\" (UniqueName: \"kubernetes.io/projected/15002e9b-936b-438d-b53b-dc8764c9dea3-kube-api-access-kqs2b\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.779562 4612 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3cb00e09-7604-4998-9c5e-00f758d2de98-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.985858 4612 generic.go:334] "Generic (PLEG): container finished" podID="3cb00e09-7604-4998-9c5e-00f758d2de98" containerID="10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8" exitCode=0
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.985916 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" event={"ID":"3cb00e09-7604-4998-9c5e-00f758d2de98","Type":"ContainerDied","Data":"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"}
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.985963 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl" event={"ID":"3cb00e09-7604-4998-9c5e-00f758d2de98","Type":"ContainerDied","Data":"72565cbc57c43791105bcb71d4d1cd75499399b05e53c7b1656239d61f39e721"}
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.985993 4612 scope.go:117] "RemoveContainer" containerID="10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.986098 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vj8kl"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.991572 4612 generic.go:334] "Generic (PLEG): container finished" podID="15002e9b-936b-438d-b53b-dc8764c9dea3" containerID="9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f" exitCode=0
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.991622 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" event={"ID":"15002e9b-936b-438d-b53b-dc8764c9dea3","Type":"ContainerDied","Data":"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"}
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.991631 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"
Dec 03 07:32:45 crc kubenswrapper[4612]: I1203 07:32:45.991655 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm" event={"ID":"15002e9b-936b-438d-b53b-dc8764c9dea3","Type":"ContainerDied","Data":"668d5b4ffedae750518bf98ea6559fb4626babd53a767656d1640c40d7ff93a3"}
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.005826 4612 scope.go:117] "RemoveContainer" containerID="10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"
Dec 03 07:32:46 crc kubenswrapper[4612]: E1203 07:32:46.006701 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8\": container with ID starting with 10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8 not found: ID does not exist" containerID="10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.006735 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8"} err="failed to get container status \"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8\": rpc error: code = NotFound desc = could not find container \"10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8\": container with ID starting with 10b95ae2330f0ee9025ea3e4969f3d58e1f48957466659b010dc1c94a29477e8 not found: ID does not exist"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.006759 4612 scope.go:117] "RemoveContainer" containerID="9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.029249 4612 scope.go:117] "RemoveContainer" containerID="9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"
Dec 03 07:32:46 crc kubenswrapper[4612]: E1203 07:32:46.029987 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f\": container with ID starting with 9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f not found: ID does not exist" containerID="9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.030035 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f"} err="failed to get container status \"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f\": rpc error: code = NotFound desc = could not find container \"9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f\": container with ID starting with 9bf160b39be871f4658a4dcf9c689a541ed6056995f8ceac89d6191d6b98265f not found: ID does not exist"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.032464 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.036095 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dnbm"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.047083 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.053665 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vj8kl"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.573589 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:32:46 crc kubenswrapper[4612]: E1203 07:32:46.574039 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb00e09-7604-4998-9c5e-00f758d2de98" containerName="controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574051 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb00e09-7604-4998-9c5e-00f758d2de98" containerName="controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: E1203 07:32:46.574068 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15002e9b-936b-438d-b53b-dc8764c9dea3" containerName="route-controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574074 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="15002e9b-936b-438d-b53b-dc8764c9dea3" containerName="route-controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: E1203 07:32:46.574082 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574089 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574181 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb00e09-7604-4998-9c5e-00f758d2de98" containerName="controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574193 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="15002e9b-936b-438d-b53b-dc8764c9dea3" containerName="route-controller-manager"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574200 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.574566 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.576307 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.576711 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.577407 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.578165 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.578246 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.578732 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.582476 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.583292 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.592757 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.593668 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.600002 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.600301 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.600599 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.600867 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.602434 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.627060 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.627275 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"]
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700627 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700681 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4lnp\" (UniqueName: \"kubernetes.io/projected/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-kube-api-access-m4lnp\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700709 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700758 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-serving-cert\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700800 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvhpb\" (UniqueName: \"kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700819 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700848 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-config\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700872 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-client-ca\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.700910 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-proxy-ca-bundles\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.802381 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-client-ca\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803074 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-proxy-ca-bundles\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803118 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803146 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4lnp\" (UniqueName: \"kubernetes.io/projected/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-kube-api-access-m4lnp\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803170 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803213 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-serving-cert\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803254 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvhpb\" (UniqueName: \"kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803279 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803310 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-config\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803949 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-proxy-ca-bundles\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.804146 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.803256 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-client-ca\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.804421 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-config\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.805049 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.810348 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-serving-cert\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.810423 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.825996 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvhpb\" (UniqueName: \"kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb\") pod \"route-controller-manager-5c5f7bc78d-xwlph\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") " pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.838344 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4lnp\" (UniqueName: \"kubernetes.io/projected/e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe-kube-api-access-m4lnp\") pod \"controller-manager-78fc4996d6-pbhkq\" (UID: \"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe\") " pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.899078 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:46 crc kubenswrapper[4612]: I1203 07:32:46.927621 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:47 crc kubenswrapper[4612]: I1203 07:32:47.099079 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15002e9b-936b-438d-b53b-dc8764c9dea3" path="/var/lib/kubelet/pods/15002e9b-936b-438d-b53b-dc8764c9dea3/volumes"
Dec 03 07:32:47 crc kubenswrapper[4612]: I1203 07:32:47.100159 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb00e09-7604-4998-9c5e-00f758d2de98" path="/var/lib/kubelet/pods/3cb00e09-7604-4998-9c5e-00f758d2de98/volumes"
Dec 03 07:32:47 crc kubenswrapper[4612]: I1203 07:32:47.146634 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"]
Dec 03 07:32:47 crc kubenswrapper[4612]: I1203 07:32:47.225077 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:32:47 crc kubenswrapper[4612]: W1203 07:32:47.241496 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd3efd4_b4c3_4b27_8b27_016cd9c9da70.slice/crio-92695025f2dd7cf017a0043e69f0d8a21de0a310ba957506982378c3b25f3d37 WatchSource:0}: Error finding container 92695025f2dd7cf017a0043e69f0d8a21de0a310ba957506982378c3b25f3d37: Status 404 returned error can't find the container with id 92695025f2dd7cf017a0043e69f0d8a21de0a310ba957506982378c3b25f3d37
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.019074 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" event={"ID":"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70","Type":"ContainerStarted","Data":"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"}
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.019363 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" event={"ID":"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70","Type":"ContainerStarted","Data":"92695025f2dd7cf017a0043e69f0d8a21de0a310ba957506982378c3b25f3d37"}
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.019378 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.021279 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq" event={"ID":"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe","Type":"ContainerStarted","Data":"7663eb12f4e193a48ee385f14a9a1c1cc24ce0b43c440bc28f081f21e2f5f5ad"}
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.021314 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq" event={"ID":"e971b59c-cfcb-4ae4-b617-aa2b3d7d78fe","Type":"ContainerStarted","Data":"6c1e27db72cdf751cf81a6eb634130e7cb83357e58e0038f8e076293a9f6df06"}
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.021738 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.024810 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.025738 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq"
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.038754 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" podStartSLOduration=3.038733513 podStartE2EDuration="3.038733513s" podCreationTimestamp="2025-12-03 07:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:32:48.033629792 +0000 UTC m=+331.206987212" watchObservedRunningTime="2025-12-03 07:32:48.038733513 +0000 UTC m=+331.212090943"
Dec 03 07:32:48 crc kubenswrapper[4612]: I1203 07:32:48.055194 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-78fc4996d6-pbhkq" podStartSLOduration=3.055176303 podStartE2EDuration="3.055176303s" podCreationTimestamp="2025-12-03 07:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:32:48.053017908 +0000 UTC m=+331.226375328" watchObservedRunningTime="2025-12-03 07:32:48.055176303 +0000 UTC m=+331.228533713"
Dec 03 07:33:17 crc kubenswrapper[4612]: I1203 07:33:17.136624 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:33:17 crc kubenswrapper[4612]: I1203 07:33:17.137318 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.811411 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bq4xt"]
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.812826 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.822582 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bq4xt"]
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944302 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6qh\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-kube-api-access-4h6qh\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944595 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-tls\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944641 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-trusted-ca\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944663 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-certificates\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944742 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944803 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-bound-sa-token\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944833 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.944860 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:28 crc kubenswrapper[4612]: I1203 07:33:28.965740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045364 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-trusted-ca\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045406 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-certificates\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045454 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-bound-sa-token\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045479 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045499 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045518 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6qh\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-kube-api-access-4h6qh\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.045539 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-tls\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.046868 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.047006 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-trusted-ca\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.048308 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-certificates\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.051300 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.051721 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-registry-tls\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.063447 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-bound-sa-token\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.064139 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6qh\" (UniqueName: \"kubernetes.io/projected/0a93217a-1e45-45fd-b3f1-9adafce5d6ed-kube-api-access-4h6qh\") pod \"image-registry-66df7c8f76-bq4xt\" (UID: \"0a93217a-1e45-45fd-b3f1-9adafce5d6ed\") " pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.126723 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:29 crc kubenswrapper[4612]: I1203 07:33:29.519503 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bq4xt"]
Dec 03 07:33:30 crc kubenswrapper[4612]: I1203 07:33:30.232536 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt" event={"ID":"0a93217a-1e45-45fd-b3f1-9adafce5d6ed","Type":"ContainerStarted","Data":"ff546c7e86f75c8614529cf6d819b865854025393d752953f6637ab54e07af4c"}
Dec 03 07:33:30 crc kubenswrapper[4612]: I1203 07:33:30.232584 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt" event={"ID":"0a93217a-1e45-45fd-b3f1-9adafce5d6ed","Type":"ContainerStarted","Data":"733068af2a9ce520c5c0f540cd49f0eba97988ebc55b34d0c22eb503221bd45d"}
Dec 03 07:33:30 crc kubenswrapper[4612]: I1203 07:33:30.232700 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:30 crc kubenswrapper[4612]: I1203 07:33:30.266927 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt" podStartSLOduration=2.266909144 podStartE2EDuration="2.266909144s" podCreationTimestamp="2025-12-03 07:33:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:33:30.262024129 +0000 UTC m=+373.435381569" watchObservedRunningTime="2025-12-03 07:33:30.266909144 +0000 UTC m=+373.440266554"
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.593154 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.594136 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" podUID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" containerName="route-controller-manager" containerID="cri-o://f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0" gracePeriod=30
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.908234 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.958894 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config\") pod \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") "
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.959007 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca\") pod \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") "
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.959070 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert\") pod \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") "
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.959130 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvhpb\" (UniqueName: \"kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb\") pod \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\" (UID: \"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70\") "
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.960216 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config" (OuterVolumeSpecName: "config") pod "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" (UID: "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.960345 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca" (OuterVolumeSpecName: "client-ca") pod "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" (UID: "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.965016 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" (UID: "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 07:33:43 crc kubenswrapper[4612]: I1203 07:33:43.968687 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb" (OuterVolumeSpecName: "kube-api-access-dvhpb") pod "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" (UID: "6fd3efd4-b4c3-4b27-8b27-016cd9c9da70"). InnerVolumeSpecName "kube-api-access-dvhpb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.060280 4612 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.060309 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvhpb\" (UniqueName: \"kubernetes.io/projected/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-kube-api-access-dvhpb\") on node \"crc\" DevicePath \"\""
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.060321 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-config\") on node \"crc\" DevicePath \"\""
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.060331 4612 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.309110 4612 generic.go:334] "Generic (PLEG): container finished" podID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" containerID="f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0" exitCode=0
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.309162 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" event={"ID":"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70","Type":"ContainerDied","Data":"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"}
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.309168 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.309193 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph" event={"ID":"6fd3efd4-b4c3-4b27-8b27-016cd9c9da70","Type":"ContainerDied","Data":"92695025f2dd7cf017a0043e69f0d8a21de0a310ba957506982378c3b25f3d37"}
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.309214 4612 scope.go:117] "RemoveContainer" containerID="f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.324716 4612 scope.go:117] "RemoveContainer" containerID="f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"
Dec 03 07:33:44 crc kubenswrapper[4612]: E1203 07:33:44.325386 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0\": container with ID starting with f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0 not found: ID does not exist" containerID="f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.325417 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0"} err="failed to get container status \"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0\": rpc error: code = NotFound desc = could not find container \"f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0\": container with ID starting with f478530e8dbba2383655134736fb76af532f535157d4ac003302b3c04114d5d0 not found: ID does not exist"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.347845 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.351654 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5f7bc78d-xwlph"]
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.653746 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"]
Dec 03 07:33:44 crc kubenswrapper[4612]: E1203 07:33:44.654023 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" containerName="route-controller-manager"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.654039 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" containerName="route-controller-manager"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.654154 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" containerName="route-controller-manager"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.654604 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.657306 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.657326 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.658077 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.658141 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.658286 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.658505 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.668782 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"]
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.671744 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-config\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.671871 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28k5z\" (UniqueName: \"kubernetes.io/projected/dc0480fd-20f1-4754-92e3-162e7da7ca4f-kube-api-access-28k5z\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.671973 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-client-ca\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.672120 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc0480fd-20f1-4754-92e3-162e7da7ca4f-serving-cert\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.774127 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-config\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.774516 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28k5z\" (UniqueName: \"kubernetes.io/projected/dc0480fd-20f1-4754-92e3-162e7da7ca4f-kube-api-access-28k5z\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.774569 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-client-ca\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.774619 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc0480fd-20f1-4754-92e3-162e7da7ca4f-serving-cert\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.775958 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-client-ca\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.776078 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc0480fd-20f1-4754-92e3-162e7da7ca4f-config\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.779895 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc0480fd-20f1-4754-92e3-162e7da7ca4f-serving-cert\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.802373 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28k5z\" (UniqueName: \"kubernetes.io/projected/dc0480fd-20f1-4754-92e3-162e7da7ca4f-kube-api-access-28k5z\") pod \"route-controller-manager-5f496ddc85-cjzbn\" (UID: \"dc0480fd-20f1-4754-92e3-162e7da7ca4f\") " pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:44 crc kubenswrapper[4612]: I1203 07:33:44.997686 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:45 crc kubenswrapper[4612]: I1203 07:33:45.100263 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd3efd4-b4c3-4b27-8b27-016cd9c9da70" path="/var/lib/kubelet/pods/6fd3efd4-b4c3-4b27-8b27-016cd9c9da70/volumes"
Dec 03 07:33:45 crc kubenswrapper[4612]: I1203 07:33:45.220503 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"]
Dec 03 07:33:45 crc kubenswrapper[4612]: I1203 07:33:45.316475 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn" event={"ID":"dc0480fd-20f1-4754-92e3-162e7da7ca4f","Type":"ContainerStarted","Data":"9f77724f9ca4df89e6f90a9c6f6e37b0dc2cb5fdd5a5b3b4287da18422cf875d"}
Dec 03 07:33:46 crc kubenswrapper[4612]: I1203 07:33:46.327152 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn" event={"ID":"dc0480fd-20f1-4754-92e3-162e7da7ca4f","Type":"ContainerStarted","Data":"e140a137e16d791ccfdaf4bb896fcdd09d7ee6ce323929025cec9cab03798df2"}
Dec 03 07:33:46 crc kubenswrapper[4612]: I1203 07:33:46.328400 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:46 crc kubenswrapper[4612]: I1203 07:33:46.337797 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn"
Dec 03 07:33:46 crc kubenswrapper[4612]: I1203 07:33:46.359658 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f496ddc85-cjzbn" podStartSLOduration=3.359633153 podStartE2EDuration="3.359633153s" podCreationTimestamp="2025-12-03 07:33:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:33:46.346903238 +0000 UTC m=+389.520260688" watchObservedRunningTime="2025-12-03 07:33:46.359633153 +0000 UTC m=+389.532990593"
Dec 03 07:33:47 crc kubenswrapper[4612]: I1203 07:33:47.136365 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:33:47 crc kubenswrapper[4612]: I1203 07:33:47.136726 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:33:49 crc kubenswrapper[4612]: I1203 07:33:49.133754 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bq4xt"
Dec 03 07:33:49 crc kubenswrapper[4612]: I1203 07:33:49.202447 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"]
Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.240930 4612 kuberuntime_container.go:808] "Killing container
with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" podUID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" containerName="registry" containerID="cri-o://a312b03f4510490a7c149bd32fd50830b58acb615595eb3ee89b802a8e42f803" gracePeriod=30 Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.493548 4612 generic.go:334] "Generic (PLEG): container finished" podID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" containerID="a312b03f4510490a7c149bd32fd50830b58acb615595eb3ee89b802a8e42f803" exitCode=0 Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.493639 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" event={"ID":"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf","Type":"ContainerDied","Data":"a312b03f4510490a7c149bd32fd50830b58acb615595eb3ee89b802a8e42f803"} Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.584984 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692278 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692351 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692399 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692431 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q745\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692500 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692580 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692736 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: 
\"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.692784 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token\") pod \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\" (UID: \"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf\") " Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.694011 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.694185 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.700324 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.701085 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745" (OuterVolumeSpecName: "kube-api-access-4q745") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "kube-api-access-4q745". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.701639 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.707271 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.707879 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.708463 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" (UID: "4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794740 4612 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794777 4612 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794786 4612 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794803 4612 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794812 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q745\" (UniqueName: \"kubernetes.io/projected/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-kube-api-access-4q745\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794820 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:14 crc kubenswrapper[4612]: I1203 07:34:14.794828 4612 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 07:34:15 crc kubenswrapper[4612]: I1203 07:34:15.501721 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" event={"ID":"4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf","Type":"ContainerDied","Data":"a484bfd4590cbbffa2496acdac9504bb172566575cb6b80f87fe15a8afeb068a"} Dec 03 07:34:15 crc kubenswrapper[4612]: I1203 07:34:15.502063 4612 scope.go:117] "RemoveContainer" containerID="a312b03f4510490a7c149bd32fd50830b58acb615595eb3ee89b802a8e42f803" Dec 03 07:34:15 crc kubenswrapper[4612]: I1203 07:34:15.501764 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-kjpvq" Dec 03 07:34:15 crc kubenswrapper[4612]: I1203 07:34:15.530256 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"] Dec 03 07:34:15 crc kubenswrapper[4612]: I1203 07:34:15.533063 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-kjpvq"] Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.097398 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" path="/var/lib/kubelet/pods/4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf/volumes" Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.136188 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.136276 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.136338 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.137170 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.137332 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85" gracePeriod=600 Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.518301 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85" exitCode=0 Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.518373 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85"} Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.518573 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee"} Dec 03 07:34:17 crc kubenswrapper[4612]: I1203 07:34:17.518595 4612 scope.go:117] "RemoveContainer" 
containerID="ddac3427bd3ff97c8c3f935f706eacb4aa563177d590e00498debf2c1fb21ad3" Dec 03 07:36:17 crc kubenswrapper[4612]: I1203 07:36:17.136115 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:36:17 crc kubenswrapper[4612]: I1203 07:36:17.136700 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:36:47 crc kubenswrapper[4612]: I1203 07:36:47.136383 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:36:47 crc kubenswrapper[4612]: I1203 07:36:47.136820 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.136474 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.137150 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.137507 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.138402 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.138473 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee" gracePeriod=600 Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.544050 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" 
containerID="a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee" exitCode=0 Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.544093 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee"} Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.544305 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235"} Dec 03 07:37:17 crc kubenswrapper[4612]: I1203 07:37:17.544329 4612 scope.go:117] "RemoveContainer" containerID="496b2817b85007c387e388726327b1b3220dec10963ee8cffca5321badedaf85" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.747795 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6gdmj"] Dec 03 07:37:46 crc kubenswrapper[4612]: E1203 07:37:46.748501 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" containerName="registry" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.748514 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" containerName="registry" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.748615 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fedd09f-d5c2-4d75-b12c-8f1cf4eccedf" containerName="registry" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.748998 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.750671 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.751066 4612 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-4n569" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.760520 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6gdmj"] Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.763840 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.764748 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4kwk4"] Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.765363 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4kwk4" Dec 03 07:37:46 crc kubenswrapper[4612]: W1203 07:37:46.766885 4612 reflector.go:561] object-"cert-manager"/"cert-manager-dockercfg-dbcml": failed to list *v1.Secret: secrets "cert-manager-dockercfg-dbcml" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "cert-manager": no relationship found between node 'crc' and this object Dec 03 07:37:46 crc kubenswrapper[4612]: E1203 07:37:46.766977 4612 reflector.go:158] "Unhandled Error" err="object-\"cert-manager\"/\"cert-manager-dockercfg-dbcml\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-manager-dockercfg-dbcml\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"cert-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.787143 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-28nxc"] Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.787910 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.790170 4612 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-qgltv" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.794214 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4kwk4"] Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.811352 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-28nxc"] Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.823053 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5tv9\" (UniqueName: \"kubernetes.io/projected/f221cbcd-d224-483f-b688-6d877302a502-kube-api-access-h5tv9\") pod \"cert-manager-cainjector-7f985d654d-6gdmj\" (UID: \"f221cbcd-d224-483f-b688-6d877302a502\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.924281 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p5nw\" (UniqueName: \"kubernetes.io/projected/a8b94ba1-6d11-4835-9ee9-e1756681dc3e-kube-api-access-6p5nw\") pod \"cert-manager-5b446d88c5-4kwk4\" (UID: \"a8b94ba1-6d11-4835-9ee9-e1756681dc3e\") " pod="cert-manager/cert-manager-5b446d88c5-4kwk4" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.924345 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqrh5\" (UniqueName: \"kubernetes.io/projected/ac4674d9-ae54-48a4-858b-75a91546ddd9-kube-api-access-wqrh5\") pod \"cert-manager-webhook-5655c58dd6-28nxc\" (UID: \"ac4674d9-ae54-48a4-858b-75a91546ddd9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.924403 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5tv9\" (UniqueName: \"kubernetes.io/projected/f221cbcd-d224-483f-b688-6d877302a502-kube-api-access-h5tv9\") pod \"cert-manager-cainjector-7f985d654d-6gdmj\" (UID: \"f221cbcd-d224-483f-b688-6d877302a502\") " 
pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" Dec 03 07:37:46 crc kubenswrapper[4612]: I1203 07:37:46.946839 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5tv9\" (UniqueName: \"kubernetes.io/projected/f221cbcd-d224-483f-b688-6d877302a502-kube-api-access-h5tv9\") pod \"cert-manager-cainjector-7f985d654d-6gdmj\" (UID: \"f221cbcd-d224-483f-b688-6d877302a502\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.026004 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p5nw\" (UniqueName: \"kubernetes.io/projected/a8b94ba1-6d11-4835-9ee9-e1756681dc3e-kube-api-access-6p5nw\") pod \"cert-manager-5b446d88c5-4kwk4\" (UID: \"a8b94ba1-6d11-4835-9ee9-e1756681dc3e\") " pod="cert-manager/cert-manager-5b446d88c5-4kwk4" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.026062 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqrh5\" (UniqueName: \"kubernetes.io/projected/ac4674d9-ae54-48a4-858b-75a91546ddd9-kube-api-access-wqrh5\") pod \"cert-manager-webhook-5655c58dd6-28nxc\" (UID: \"ac4674d9-ae54-48a4-858b-75a91546ddd9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.042322 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqrh5\" (UniqueName: \"kubernetes.io/projected/ac4674d9-ae54-48a4-858b-75a91546ddd9-kube-api-access-wqrh5\") pod \"cert-manager-webhook-5655c58dd6-28nxc\" (UID: \"ac4674d9-ae54-48a4-858b-75a91546ddd9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.049295 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p5nw\" (UniqueName: \"kubernetes.io/projected/a8b94ba1-6d11-4835-9ee9-e1756681dc3e-kube-api-access-6p5nw\") pod \"cert-manager-5b446d88c5-4kwk4\" (UID: \"a8b94ba1-6d11-4835-9ee9-e1756681dc3e\") " pod="cert-manager/cert-manager-5b446d88c5-4kwk4" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.066232 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.100649 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.311434 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6gdmj"] Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.328345 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.369736 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-28nxc"] Dec 03 07:37:47 crc kubenswrapper[4612]: W1203 07:37:47.373092 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac4674d9_ae54_48a4_858b_75a91546ddd9.slice/crio-180619af058ee5bca9aa021a15247aab75ffd8b204571831159416ceaee47be3 WatchSource:0}: Error finding container 180619af058ee5bca9aa021a15247aab75ffd8b204571831159416ceaee47be3: Status 404 returned error can't find the container with id 180619af058ee5bca9aa021a15247aab75ffd8b204571831159416ceaee47be3 Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.725716 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" event={"ID":"ac4674d9-ae54-48a4-858b-75a91546ddd9","Type":"ContainerStarted","Data":"180619af058ee5bca9aa021a15247aab75ffd8b204571831159416ceaee47be3"} Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.728066 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" event={"ID":"f221cbcd-d224-483f-b688-6d877302a502","Type":"ContainerStarted","Data":"c79b635ff622374544c469306c3561d255687e4a11454ce11a5bd6a79eee3437"} Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.987553 4612 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dbcml" Dec 03 07:37:47 crc kubenswrapper[4612]: I1203 07:37:47.990637 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4kwk4" Dec 03 07:37:48 crc kubenswrapper[4612]: I1203 07:37:48.214376 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4kwk4"] Dec 03 07:37:48 crc kubenswrapper[4612]: I1203 07:37:48.733582 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4kwk4" event={"ID":"a8b94ba1-6d11-4835-9ee9-e1756681dc3e","Type":"ContainerStarted","Data":"9b1040ff09d6d5f6c1adec33fe4fd5649e62bbd18e4d7332f5b3b5ff4b44b5bb"} Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.757269 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" event={"ID":"f221cbcd-d224-483f-b688-6d877302a502","Type":"ContainerStarted","Data":"ac8030121de412ac19cb5a5a9bbdf0d8a6bd935d811992fccc4fa3bd51a88268"} Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.758974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" event={"ID":"ac4674d9-ae54-48a4-858b-75a91546ddd9","Type":"ContainerStarted","Data":"ef26a5116fd9d84f43283edeb05d5b0aa4fc29620ca1ac62f016562536d2f7fd"} Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.759474 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.760338 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4kwk4" event={"ID":"a8b94ba1-6d11-4835-9ee9-e1756681dc3e","Type":"ContainerStarted","Data":"e65f5fefee6de472713bcc5872867d63ea56b6f079e8a7418682ae772e6aa95b"} Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.773878 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-6gdmj" podStartSLOduration=1.5819363659999999 podStartE2EDuration="6.773863773s" podCreationTimestamp="2025-12-03 07:37:46 +0000 UTC" firstStartedPulling="2025-12-03 07:37:47.328068711 +0000 UTC m=+630.501426111" lastFinishedPulling="2025-12-03 07:37:52.519996108 +0000 UTC m=+635.693353518" observedRunningTime="2025-12-03 07:37:52.771519084 +0000 UTC m=+635.944876484" watchObservedRunningTime="2025-12-03 07:37:52.773863773 +0000 UTC m=+635.947221173" Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.788002 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" podStartSLOduration=1.695685039 podStartE2EDuration="6.787983147s" podCreationTimestamp="2025-12-03 07:37:46 +0000 UTC" firstStartedPulling="2025-12-03 07:37:47.375026409 +0000 UTC m=+630.548383809" lastFinishedPulling="2025-12-03 07:37:52.467324517 +0000 UTC m=+635.640681917" observedRunningTime="2025-12-03 07:37:52.787534865 +0000 UTC m=+635.960892265" watchObservedRunningTime="2025-12-03 07:37:52.787983147 +0000 UTC m=+635.961340567" Dec 03 07:37:52 crc kubenswrapper[4612]: I1203 07:37:52.805458 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-4kwk4" podStartSLOduration=2.55370622 podStartE2EDuration="6.805440544s" podCreationTimestamp="2025-12-03 07:37:46 +0000 UTC" firstStartedPulling="2025-12-03 07:37:48.22225429 +0000 UTC m=+631.395611690" lastFinishedPulling="2025-12-03 07:37:52.473988614 +0000 UTC m=+635.647346014" observedRunningTime="2025-12-03 07:37:52.802227594 +0000 UTC m=+635.975584994" 
watchObservedRunningTime="2025-12-03 07:37:52.805440544 +0000 UTC m=+635.978797944" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.447130 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9krtb"] Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.447880 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-controller" containerID="cri-o://3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.447960 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.448001 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="nbdb" containerID="cri-o://5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.448072 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="northd" containerID="cri-o://0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.448153 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-acl-logging" containerID="cri-o://d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.448204 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-node" containerID="cri-o://ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.448265 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="sbdb" containerID="cri-o://df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.486045 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" containerID="cri-o://8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476" gracePeriod=30 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.783492 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/2.log" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.783860 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/1.log" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.783893 4612 generic.go:334] "Generic (PLEG): container finished" podID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" containerID="aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51" exitCode=2 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.783953 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerDied","Data":"aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.784007 4612 scope.go:117] "RemoveContainer" containerID="1e2d8f268ad6bc13c478d54774c1d91ca7f7512b0f5c675f2a360be64babac7c" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.784379 4612 scope.go:117] "RemoveContainer" containerID="aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51" Dec 03 07:37:56 crc kubenswrapper[4612]: E1203 07:37:56.784595 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-p52kb_openshift-multus(b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d)\"" pod="openshift-multus/multus-p52kb" podUID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.791070 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovnkube-controller/3.log" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.792798 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-acl-logging/0.log" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.793727 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-controller/0.log" Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794080 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476" exitCode=0 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794104 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e" exitCode=0 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794116 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228" exitCode=0 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794125 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45" exitCode=0 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794137 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2" exitCode=0 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794146 4612 generic.go:334] "Generic (PLEG): container finished" 
podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6" exitCode=143 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794155 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9" exitCode=143 Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794119 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794191 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794207 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794220 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794230 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794240 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.794250 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9"} Dec 03 07:37:56 crc kubenswrapper[4612]: I1203 07:37:56.866958 4612 scope.go:117] "RemoveContainer" containerID="77b7a58278abf172c403f8814db67503b55ef064c7bf6763c010234431a10b74" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.103232 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-28nxc" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.141813 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-acl-logging/0.log" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.142768 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-controller/0.log" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.143219 4612 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231138 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mv9zz"] Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231449 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231467 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231485 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="nbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231496 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="nbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231512 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231522 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231536 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-node" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231545 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-node" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231564 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231574 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231589 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kubecfg-setup" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231599 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kubecfg-setup" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231617 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231628 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231643 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-acl-logging" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231653 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-acl-logging" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231666 4612 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="northd" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231676 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="northd" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231692 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="sbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231702 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="sbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.231715 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231726 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231859 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231874 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-node" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231885 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="sbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231898 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231913 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231926 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231938 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovn-acl-logging" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231971 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231984 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.231997 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="northd" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.232008 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.232020 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="nbdb" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 
07:37:57.232154 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.232165 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: E1203 07:37:57.232182 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.232191 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerName="ovnkube-controller" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.234855 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264629 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264708 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264739 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264781 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264837 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264886 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.264926 
4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265021 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265113 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265158 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265206 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265248 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265301 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksmrn\" (UniqueName: \"kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265342 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265385 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265427 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265479 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265543 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.265643 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert\") pod \"64b21a08-7c39-4c31-a34d-88e74edf88c6\" (UID: \"64b21a08-7c39-4c31-a34d-88e74edf88c6\") " Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266267 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log" (OuterVolumeSpecName: "node-log") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266299 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266364 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266390 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266415 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266438 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). 
InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266674 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266728 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash" (OuterVolumeSpecName: "host-slash") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266738 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266750 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266790 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket" (OuterVolumeSpecName: "log-socket") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266937 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.267046 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.267090 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.267139 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.266264 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.267617 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.280431 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn" (OuterVolumeSpecName: "kube-api-access-ksmrn") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "kube-api-access-ksmrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.281177 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.282499 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "64b21a08-7c39-4c31-a34d-88e74edf88c6" (UID: "64b21a08-7c39-4c31-a34d-88e74edf88c6"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367483 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-kubelet\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367696 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kmvs\" (UniqueName: \"kubernetes.io/projected/5289166c-e76f-41bc-bd6c-e5100563fcb4-kube-api-access-8kmvs\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367781 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367806 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-systemd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367910 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-log-socket\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367936 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-bin\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.367997 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-netns\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368018 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-node-log\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368062 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-config\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368097 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-script-lib\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368158 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368225 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-systemd-units\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368264 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-ovn\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368291 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-env-overrides\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368310 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovn-node-metrics-cert\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368333 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-etc-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368363 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-netd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368398 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-slash\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368417 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368455 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-var-lib-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368510 4612 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368551 4612 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368565 4612 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368579 4612 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368590 4612 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368663 4612 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368674 4612 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368685 4612 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368695 4612 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368729 4612 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64b21a08-7c39-4c31-a34d-88e74edf88c6-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368743 4612 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368755 4612 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368766 4612 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368777 4612 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368788 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksmrn\" (UniqueName: \"kubernetes.io/projected/64b21a08-7c39-4c31-a34d-88e74edf88c6-kube-api-access-ksmrn\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368800 4612 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368811 4612 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368821 4612 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368832 4612 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.368845 4612 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64b21a08-7c39-4c31-a34d-88e74edf88c6-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470444 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-bin\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470493 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-netns\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470512 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-node-log\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470530 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-config\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470539 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-bin\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470558 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-netns\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470552 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-script-lib\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470638 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-node-log\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470674 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470716 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-run-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470789 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-systemd-units\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470861 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-ovn\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470907 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-env-overrides\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470958 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovn-node-metrics-cert\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.470992 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-etc-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471024 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-netd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471362 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-env-overrides\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471224 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-systemd-units\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471241 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-script-lib\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471270 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-etc-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: 
\"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471398 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-slash\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471426 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-slash\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471292 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-ovn\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471201 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovnkube-config\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471430 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471469 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471476 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-var-lib-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471502 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-var-lib-openvswitch\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471314 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-cni-netd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 
07:37:57.471551 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-kubelet\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471608 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kmvs\" (UniqueName: \"kubernetes.io/projected/5289166c-e76f-41bc-bd6c-e5100563fcb4-kube-api-access-8kmvs\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471673 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-kubelet\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471670 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471711 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471724 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-systemd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471765 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-log-socket\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471837 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-log-socket\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.471844 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5289166c-e76f-41bc-bd6c-e5100563fcb4-run-systemd\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.474580 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5289166c-e76f-41bc-bd6c-e5100563fcb4-ovn-node-metrics-cert\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.487620 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kmvs\" (UniqueName: \"kubernetes.io/projected/5289166c-e76f-41bc-bd6c-e5100563fcb4-kube-api-access-8kmvs\") pod \"ovnkube-node-mv9zz\" (UID: \"5289166c-e76f-41bc-bd6c-e5100563fcb4\") " pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.561191 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:37:57 crc kubenswrapper[4612]: W1203 07:37:57.576163 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5289166c_e76f_41bc_bd6c_e5100563fcb4.slice/crio-6e4e742e866717c6073a01d1adde2f1c9e04633a1026530a2b57e2ca1e1e0a2a WatchSource:0}: Error finding container 6e4e742e866717c6073a01d1adde2f1c9e04633a1026530a2b57e2ca1e1e0a2a: Status 404 returned error can't find the container with id 6e4e742e866717c6073a01d1adde2f1c9e04633a1026530a2b57e2ca1e1e0a2a Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.800766 4612 generic.go:334] "Generic (PLEG): container finished" podID="5289166c-e76f-41bc-bd6c-e5100563fcb4" containerID="78c3c1f1e0a64106ecb468c988dc9bf0cbab00cdad7f619ff9f03aee494d3553" exitCode=0 Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.800864 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerDied","Data":"78c3c1f1e0a64106ecb468c988dc9bf0cbab00cdad7f619ff9f03aee494d3553"} Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.800923 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"6e4e742e866717c6073a01d1adde2f1c9e04633a1026530a2b57e2ca1e1e0a2a"} Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.815261 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-acl-logging/0.log" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.816454 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9krtb_64b21a08-7c39-4c31-a34d-88e74edf88c6/ovn-controller/0.log" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.816994 4612 generic.go:334] "Generic (PLEG): container finished" podID="64b21a08-7c39-4c31-a34d-88e74edf88c6" containerID="5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d" exitCode=0 Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.817085 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d"} Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.817114 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" 
event={"ID":"64b21a08-7c39-4c31-a34d-88e74edf88c6","Type":"ContainerDied","Data":"09918bbcce6dbd6b285fac5495c8fbfed1580327cc63d01e4880a3b8239f2fb5"} Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.817141 4612 scope.go:117] "RemoveContainer" containerID="8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.817148 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9krtb" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.818617 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/2.log" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.832141 4612 scope.go:117] "RemoveContainer" containerID="df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.870044 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9krtb"] Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.871642 4612 scope.go:117] "RemoveContainer" containerID="5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.875835 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9krtb"] Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.885229 4612 scope.go:117] "RemoveContainer" containerID="0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.912365 4612 scope.go:117] "RemoveContainer" containerID="a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.926609 4612 scope.go:117] "RemoveContainer" containerID="ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.943685 4612 scope.go:117] "RemoveContainer" containerID="d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.961213 4612 scope.go:117] "RemoveContainer" containerID="3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9" Dec 03 07:37:57 crc kubenswrapper[4612]: I1203 07:37:57.986016 4612 scope.go:117] "RemoveContainer" containerID="eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.008317 4612 scope.go:117] "RemoveContainer" containerID="8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.008739 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476\": container with ID starting with 8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476 not found: ID does not exist" containerID="8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.008775 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476"} err="failed to get container status \"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476\": rpc error: code = NotFound desc = could not find container 
\"8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476\": container with ID starting with 8e13493233264eeb98bd45762266f4d596faa09bc50a48ba9abc4680b2fe3476 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.008792 4612 scope.go:117] "RemoveContainer" containerID="df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.009127 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\": container with ID starting with df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e not found: ID does not exist" containerID="df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.009152 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e"} err="failed to get container status \"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\": rpc error: code = NotFound desc = could not find container \"df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e\": container with ID starting with df3dc900efcc27e63d8456fe2ef37dd4f6090121c74aaa3d3355d731ca679d8e not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.009168 4612 scope.go:117] "RemoveContainer" containerID="5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.009633 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\": container with ID starting with 5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d not found: ID does not exist" containerID="5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.009699 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d"} err="failed to get container status \"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\": rpc error: code = NotFound desc = could not find container \"5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d\": container with ID starting with 5083f1011edc3b82a55cc3110c41db003ad32c364699c24261b87979c3f5387d not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.009719 4612 scope.go:117] "RemoveContainer" containerID="0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.009987 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\": container with ID starting with 0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228 not found: ID does not exist" containerID="0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010012 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228"} 
err="failed to get container status \"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\": rpc error: code = NotFound desc = could not find container \"0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228\": container with ID starting with 0c908a461f014c22368d605286e195d1a20eeefeff2a926bbbe99c7cc3a7b228 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010028 4612 scope.go:117] "RemoveContainer" containerID="a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.010250 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\": container with ID starting with a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45 not found: ID does not exist" containerID="a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010268 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45"} err="failed to get container status \"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\": rpc error: code = NotFound desc = could not find container \"a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45\": container with ID starting with a18308ea681dbe28a26d67706a654ad5184624e9f7a05c344aa1985d68aa4e45 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010282 4612 scope.go:117] "RemoveContainer" containerID="ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.010638 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\": container with ID starting with ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2 not found: ID does not exist" containerID="ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010660 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2"} err="failed to get container status \"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\": rpc error: code = NotFound desc = could not find container \"ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2\": container with ID starting with ec7430935a60d2f6f7d2aaa12059af63b6ef7d72fe8cf5ca997e4daf0832fcd2 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010672 4612 scope.go:117] "RemoveContainer" containerID="d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.010908 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\": container with ID starting with d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6 not found: ID does not exist" containerID="d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010927 4612 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6"} err="failed to get container status \"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\": rpc error: code = NotFound desc = could not find container \"d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6\": container with ID starting with d87b6cc542addbf9e2fbca3cf3e7b4bcfd0ea0b704a88d8373d60ceaaa78f4d6 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.010963 4612 scope.go:117] "RemoveContainer" containerID="3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.011173 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\": container with ID starting with 3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9 not found: ID does not exist" containerID="3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.011203 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9"} err="failed to get container status \"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\": rpc error: code = NotFound desc = could not find container \"3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9\": container with ID starting with 3ea0d15810e791d9679dda7f0c1318e9b46a3bc7b6a9baed624c52a9009c4de9 not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.011222 4612 scope.go:117] "RemoveContainer" containerID="eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace" Dec 03 07:37:58 crc kubenswrapper[4612]: E1203 07:37:58.011495 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\": container with ID starting with eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace not found: ID does not exist" containerID="eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.011519 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace"} err="failed to get container status \"eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\": rpc error: code = NotFound desc = could not find container \"eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace\": container with ID starting with eeda43272c77e30fab5f8b1ac9ffc0355e9894f22780ba640cfb6f2306172ace not found: ID does not exist" Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828672 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"18fd01beb68e5a92b14f903d11da581da38c0d15c34b0f858f8deb87f857f90b"} Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828714 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" 
event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"01c8be95d992d0623448e5f6b9e15d17d6bff8402b49fbbfad74f9de475356e8"} Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828725 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"f2f9961419a8041f6d0392ed2ad15f70286905d17231c602a642cde2070ea2e7"} Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828736 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"ea5fa9baff044789355081aec250c575dcd52ee4a55b39e08a168770cd01c36a"} Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828747 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"ee20482bcea9fe9a31c2f4df2db1acdbff0a4e1cb149916ab57da3c1a29c0383"} Dec 03 07:37:58 crc kubenswrapper[4612]: I1203 07:37:58.828757 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"5c205f421afa23aaa5c2056119efd81b6c965cc42b74f572a5ed955749c58178"} Dec 03 07:37:59 crc kubenswrapper[4612]: I1203 07:37:59.102819 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64b21a08-7c39-4c31-a34d-88e74edf88c6" path="/var/lib/kubelet/pods/64b21a08-7c39-4c31-a34d-88e74edf88c6/volumes" Dec 03 07:38:00 crc kubenswrapper[4612]: I1203 07:38:00.846301 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"8d10997b865f16ac88eb2bf4e05ff2bf89940d68471357cdfa39b1fa9c3a8de5"} Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.864673 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" event={"ID":"5289166c-e76f-41bc-bd6c-e5100563fcb4","Type":"ContainerStarted","Data":"95e4717bbd7cfd159d63f455d846dad14130bf81dde03163877837fbafbf91b2"} Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.865193 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.865561 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.865733 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.911372 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" podStartSLOduration=5.91135085 podStartE2EDuration="5.91135085s" podCreationTimestamp="2025-12-03 07:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:38:02.904751284 +0000 UTC m=+646.078108694" watchObservedRunningTime="2025-12-03 07:38:02.91135085 +0000 UTC m=+646.084708270" Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.912502 4612 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:02 crc kubenswrapper[4612]: I1203 07:38:02.913808 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:11 crc kubenswrapper[4612]: I1203 07:38:11.089769 4612 scope.go:117] "RemoveContainer" containerID="aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51" Dec 03 07:38:11 crc kubenswrapper[4612]: E1203 07:38:11.090826 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-p52kb_openshift-multus(b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d)\"" pod="openshift-multus/multus-p52kb" podUID="b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d" Dec 03 07:38:23 crc kubenswrapper[4612]: I1203 07:38:23.088972 4612 scope.go:117] "RemoveContainer" containerID="aba252007b0952f38838dbf99833409c0e250af97a1871710216aae27e184d51" Dec 03 07:38:23 crc kubenswrapper[4612]: I1203 07:38:23.980019 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p52kb_b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d/kube-multus/2.log" Dec 03 07:38:23 crc kubenswrapper[4612]: I1203 07:38:23.980452 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p52kb" event={"ID":"b22d1dce-44e3-4b8b-aab3-4ce7702b5c6d","Type":"ContainerStarted","Data":"76f8cde7dec310f110fac911f232b11bdedb9f9d48a7e2617d559abe220859e7"} Dec 03 07:38:27 crc kubenswrapper[4612]: I1203 07:38:27.582048 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mv9zz" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.414022 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr"] Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.415684 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.420227 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.422452 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr"] Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.432260 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.432308 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.432342 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64vsk\" (UniqueName: \"kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.533381 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64vsk\" (UniqueName: \"kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.533821 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.534026 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.534389 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.534604 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.552416 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64vsk\" (UniqueName: \"kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:42 crc kubenswrapper[4612]: I1203 07:38:42.729772 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:43 crc kubenswrapper[4612]: I1203 07:38:43.120387 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr"] Dec 03 07:38:43 crc kubenswrapper[4612]: W1203 07:38:43.126792 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4626e1e5_23a3_47d8_98ae_3a4ee0c39c92.slice/crio-77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9 WatchSource:0}: Error finding container 77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9: Status 404 returned error can't find the container with id 77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9 Dec 03 07:38:44 crc kubenswrapper[4612]: I1203 07:38:44.099120 4612 generic.go:334] "Generic (PLEG): container finished" podID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerID="d23f37ce4a7a8c8cca62e4a8c66422ecb74be1062428824dbecdda8626e95eb1" exitCode=0 Dec 03 07:38:44 crc kubenswrapper[4612]: I1203 07:38:44.099160 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" event={"ID":"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92","Type":"ContainerDied","Data":"d23f37ce4a7a8c8cca62e4a8c66422ecb74be1062428824dbecdda8626e95eb1"} Dec 03 07:38:44 crc kubenswrapper[4612]: I1203 07:38:44.099183 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" event={"ID":"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92","Type":"ContainerStarted","Data":"77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9"} Dec 03 07:38:46 crc kubenswrapper[4612]: I1203 07:38:46.121984 4612 generic.go:334] "Generic (PLEG): container finished" podID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerID="49c2a3a43262f93b7ce057b1674a7b1c977a150fdf798c69f18cefb174c14549" exitCode=0 Dec 03 07:38:46 crc kubenswrapper[4612]: I1203 07:38:46.122075 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" event={"ID":"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92","Type":"ContainerDied","Data":"49c2a3a43262f93b7ce057b1674a7b1c977a150fdf798c69f18cefb174c14549"} Dec 03 07:38:47 crc kubenswrapper[4612]: I1203 07:38:47.131500 4612 generic.go:334] "Generic (PLEG): container finished" podID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerID="c9c214e7d3b85014ecbe5c2ae7ded091e7624c99a465108e20b329fb42e8e705" exitCode=0 Dec 03 07:38:47 crc kubenswrapper[4612]: I1203 07:38:47.131625 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" event={"ID":"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92","Type":"ContainerDied","Data":"c9c214e7d3b85014ecbe5c2ae7ded091e7624c99a465108e20b329fb42e8e705"} Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.346672 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.452985 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util\") pod \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.453096 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle\") pod \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.453146 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64vsk\" (UniqueName: \"kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk\") pod \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\" (UID: \"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92\") " Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.453863 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle" (OuterVolumeSpecName: "bundle") pod "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" (UID: "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.459983 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk" (OuterVolumeSpecName: "kube-api-access-64vsk") pod "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" (UID: "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92"). InnerVolumeSpecName "kube-api-access-64vsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.468310 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util" (OuterVolumeSpecName: "util") pod "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" (UID: "4626e1e5-23a3-47d8-98ae-3a4ee0c39c92"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.555117 4612 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-util\") on node \"crc\" DevicePath \"\"" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.555171 4612 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:38:48 crc kubenswrapper[4612]: I1203 07:38:48.555191 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64vsk\" (UniqueName: \"kubernetes.io/projected/4626e1e5-23a3-47d8-98ae-3a4ee0c39c92-kube-api-access-64vsk\") on node \"crc\" DevicePath \"\"" Dec 03 07:38:49 crc kubenswrapper[4612]: I1203 07:38:49.147023 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" event={"ID":"4626e1e5-23a3-47d8-98ae-3a4ee0c39c92","Type":"ContainerDied","Data":"77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9"} Dec 03 07:38:49 crc kubenswrapper[4612]: I1203 07:38:49.147076 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77ad26569e27b31c3a50e631761d87c46dcf5255b7a5235e42b41a256b6651a9" Dec 03 07:38:49 crc kubenswrapper[4612]: I1203 07:38:49.147148 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.215764 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd"] Dec 03 07:38:51 crc kubenswrapper[4612]: E1203 07:38:51.215998 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="util" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.216012 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="util" Dec 03 07:38:51 crc kubenswrapper[4612]: E1203 07:38:51.216025 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="pull" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.216030 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="pull" Dec 03 07:38:51 crc kubenswrapper[4612]: E1203 07:38:51.216045 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="extract" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.216051 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="extract" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.216158 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="4626e1e5-23a3-47d8-98ae-3a4ee0c39c92" containerName="extract" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.216564 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.219301 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-8bddv" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.219910 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.220116 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.232497 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd"] Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.290110 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6tmg\" (UniqueName: \"kubernetes.io/projected/b1d96da3-e5c3-46d4-b29b-6121d6e4d112-kube-api-access-f6tmg\") pod \"nmstate-operator-5b5b58f5c8-vlsmd\" (UID: \"b1d96da3-e5c3-46d4-b29b-6121d6e4d112\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.391860 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6tmg\" (UniqueName: \"kubernetes.io/projected/b1d96da3-e5c3-46d4-b29b-6121d6e4d112-kube-api-access-f6tmg\") pod \"nmstate-operator-5b5b58f5c8-vlsmd\" (UID: \"b1d96da3-e5c3-46d4-b29b-6121d6e4d112\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.414439 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6tmg\" (UniqueName: \"kubernetes.io/projected/b1d96da3-e5c3-46d4-b29b-6121d6e4d112-kube-api-access-f6tmg\") pod \"nmstate-operator-5b5b58f5c8-vlsmd\" (UID: \"b1d96da3-e5c3-46d4-b29b-6121d6e4d112\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" Dec 03 07:38:51 crc kubenswrapper[4612]: I1203 07:38:51.529217 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" Dec 03 07:38:52 crc kubenswrapper[4612]: I1203 07:38:52.899108 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd"] Dec 03 07:38:53 crc kubenswrapper[4612]: I1203 07:38:53.660871 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" event={"ID":"b1d96da3-e5c3-46d4-b29b-6121d6e4d112","Type":"ContainerStarted","Data":"38fce5c07a4a47627af3417782d3d28f5720f3a1c28527ecd96c7f823763487f"} Dec 03 07:38:55 crc kubenswrapper[4612]: I1203 07:38:55.672913 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" event={"ID":"b1d96da3-e5c3-46d4-b29b-6121d6e4d112","Type":"ContainerStarted","Data":"323ae6ceb94af1ca0a480d3695f7cc654521f97d69fd4cc237dce3cfe7a856cd"} Dec 03 07:38:55 crc kubenswrapper[4612]: I1203 07:38:55.706122 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vlsmd" podStartSLOduration=2.380292673 podStartE2EDuration="4.706104599s" podCreationTimestamp="2025-12-03 07:38:51 +0000 UTC" firstStartedPulling="2025-12-03 07:38:52.914099642 +0000 UTC m=+696.087457042" lastFinishedPulling="2025-12-03 07:38:55.239911568 +0000 UTC m=+698.413268968" observedRunningTime="2025-12-03 07:38:55.701641787 +0000 UTC m=+698.874999207" watchObservedRunningTime="2025-12-03 07:38:55.706104599 +0000 UTC m=+698.879462019" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.651865 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.652651 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.657901 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-xfk4k" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.661557 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.666647 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.667293 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.668783 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.692571 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.709285 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-4mtzb"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.709985 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761630 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd47d\" (UniqueName: \"kubernetes.io/projected/a69b0d1e-d485-4424-a5a0-607e997bbaf6-kube-api-access-fd47d\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761682 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-ovs-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761726 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq9fr\" (UniqueName: \"kubernetes.io/projected/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-kube-api-access-xq9fr\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761811 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761848 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-dbus-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761869 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-nmstate-lock\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.761884 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kzzq\" (UniqueName: \"kubernetes.io/projected/0eea55b7-ecfe-4fc9-bff2-061da172743a-kube-api-access-9kzzq\") pod \"nmstate-metrics-7f946cbc9-7dp5q\" (UID: \"0eea55b7-ecfe-4fc9-bff2-061da172743a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.843794 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.844516 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.849209 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.849477 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-tljtb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.850307 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.856243 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65"] Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863343 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863408 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-dbus-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863440 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bd325177-e3fc-476e-b59f-363f1bc2fe0a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863459 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kzzq\" (UniqueName: \"kubernetes.io/projected/0eea55b7-ecfe-4fc9-bff2-061da172743a-kube-api-access-9kzzq\") pod \"nmstate-metrics-7f946cbc9-7dp5q\" (UID: \"0eea55b7-ecfe-4fc9-bff2-061da172743a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863475 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-nmstate-lock\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863491 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863524 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd47d\" (UniqueName: \"kubernetes.io/projected/a69b0d1e-d485-4424-a5a0-607e997bbaf6-kube-api-access-fd47d\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: 
\"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863544 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-ovs-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863575 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsxvl\" (UniqueName: \"kubernetes.io/projected/bd325177-e3fc-476e-b59f-363f1bc2fe0a-kube-api-access-gsxvl\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863601 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq9fr\" (UniqueName: \"kubernetes.io/projected/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-kube-api-access-xq9fr\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.863914 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-nmstate-lock\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.864008 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-ovs-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: E1203 07:38:56.864123 4612 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 07:38:56 crc kubenswrapper[4612]: E1203 07:38:56.864216 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair podName:a69b0d1e-d485-4424-a5a0-607e997bbaf6 nodeName:}" failed. No retries permitted until 2025-12-03 07:38:57.364190232 +0000 UTC m=+700.537547662 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-hnhsj" (UID: "a69b0d1e-d485-4424-a5a0-607e997bbaf6") : secret "openshift-nmstate-webhook" not found Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.864250 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-dbus-socket\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.890924 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq9fr\" (UniqueName: \"kubernetes.io/projected/3dfc52ec-c1e6-4162-bccf-6fb5a855212f-kube-api-access-xq9fr\") pod \"nmstate-handler-4mtzb\" (UID: \"3dfc52ec-c1e6-4162-bccf-6fb5a855212f\") " pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.895110 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd47d\" (UniqueName: \"kubernetes.io/projected/a69b0d1e-d485-4424-a5a0-607e997bbaf6-kube-api-access-fd47d\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.900162 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kzzq\" (UniqueName: \"kubernetes.io/projected/0eea55b7-ecfe-4fc9-bff2-061da172743a-kube-api-access-9kzzq\") pod \"nmstate-metrics-7f946cbc9-7dp5q\" (UID: \"0eea55b7-ecfe-4fc9-bff2-061da172743a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.965153 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsxvl\" (UniqueName: \"kubernetes.io/projected/bd325177-e3fc-476e-b59f-363f1bc2fe0a-kube-api-access-gsxvl\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.965288 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bd325177-e3fc-476e-b59f-363f1bc2fe0a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.965315 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: E1203 07:38:56.965459 4612 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 03 07:38:56 crc kubenswrapper[4612]: E1203 07:38:56.965513 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert 
podName:bd325177-e3fc-476e-b59f-363f1bc2fe0a nodeName:}" failed. No retries permitted until 2025-12-03 07:38:57.465495592 +0000 UTC m=+700.638852992 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-qgl65" (UID: "bd325177-e3fc-476e-b59f-363f1bc2fe0a") : secret "plugin-serving-cert" not found Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.966833 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bd325177-e3fc-476e-b59f-363f1bc2fe0a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:56 crc kubenswrapper[4612]: I1203 07:38:56.970526 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.007787 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsxvl\" (UniqueName: \"kubernetes.io/projected/bd325177-e3fc-476e-b59f-363f1bc2fe0a-kube-api-access-gsxvl\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.027853 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.139443 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-77b44546b8-jgmk7"] Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.150731 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.157217 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-77b44546b8-jgmk7"] Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168506 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxvn5\" (UniqueName: \"kubernetes.io/projected/3af8d005-c0f8-450e-9735-f064dd6101e9-kube-api-access-qxvn5\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168619 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168651 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-service-ca\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168691 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-oauth-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168711 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-console-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168728 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-oauth-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.168747 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-trusted-ca-bundle\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270228 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-console-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc 
kubenswrapper[4612]: I1203 07:38:57.270274 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-oauth-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270296 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-trusted-ca-bundle\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270335 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxvn5\" (UniqueName: \"kubernetes.io/projected/3af8d005-c0f8-450e-9735-f064dd6101e9-kube-api-access-qxvn5\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270372 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270409 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-service-ca\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.270447 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-oauth-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.271225 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-oauth-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.271701 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-console-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.273357 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-trusted-ca-bundle\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: 
I1203 07:38:57.274035 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3af8d005-c0f8-450e-9735-f064dd6101e9-service-ca\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.282902 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-serving-cert\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.283773 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3af8d005-c0f8-450e-9735-f064dd6101e9-console-oauth-config\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.288251 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxvn5\" (UniqueName: \"kubernetes.io/projected/3af8d005-c0f8-450e-9735-f064dd6101e9-kube-api-access-qxvn5\") pod \"console-77b44546b8-jgmk7\" (UID: \"3af8d005-c0f8-450e-9735-f064dd6101e9\") " pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.320028 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q"] Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.371500 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.374847 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a69b0d1e-d485-4424-a5a0-607e997bbaf6-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hnhsj\" (UID: \"a69b0d1e-d485-4424-a5a0-607e997bbaf6\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.472987 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.475808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd325177-e3fc-476e-b59f-363f1bc2fe0a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qgl65\" (UID: \"bd325177-e3fc-476e-b59f-363f1bc2fe0a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.488288 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.581813 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.705823 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" event={"ID":"0eea55b7-ecfe-4fc9-bff2-061da172743a","Type":"ContainerStarted","Data":"ed43446210e54ac4f60545e94c0924d007074d74eafe010951ef050303007cf3"} Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.708092 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-77b44546b8-jgmk7"] Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.711679 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4mtzb" event={"ID":"3dfc52ec-c1e6-4162-bccf-6fb5a855212f","Type":"ContainerStarted","Data":"03488204a3f379d3440ca21c4cb0c2694be1812174e9c055d99d630bedd95f9a"} Dec 03 07:38:57 crc kubenswrapper[4612]: W1203 07:38:57.715246 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3af8d005_c0f8_450e_9735_f064dd6101e9.slice/crio-b2a88a68a169173cc425df1e47e4e4080b84274a2494cd84c2ba1b3be398bd8a WatchSource:0}: Error finding container b2a88a68a169173cc425df1e47e4e4080b84274a2494cd84c2ba1b3be398bd8a: Status 404 returned error can't find the container with id b2a88a68a169173cc425df1e47e4e4080b84274a2494cd84c2ba1b3be398bd8a Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.769102 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" Dec 03 07:38:57 crc kubenswrapper[4612]: I1203 07:38:57.982909 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj"] Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.176029 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65"] Dec 03 07:38:58 crc kubenswrapper[4612]: W1203 07:38:58.186545 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd325177_e3fc_476e_b59f_363f1bc2fe0a.slice/crio-2610b5556eadcdfc4dedefbe0c16ad9a5bb3e0f05951cccc40fa36703b533731 WatchSource:0}: Error finding container 2610b5556eadcdfc4dedefbe0c16ad9a5bb3e0f05951cccc40fa36703b533731: Status 404 returned error can't find the container with id 2610b5556eadcdfc4dedefbe0c16ad9a5bb3e0f05951cccc40fa36703b533731 Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.719491 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-77b44546b8-jgmk7" event={"ID":"3af8d005-c0f8-450e-9735-f064dd6101e9","Type":"ContainerStarted","Data":"a60297f72801d5b118164b131a456c8576257d078d63ef18cadd6617cb195d78"} Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.719531 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-77b44546b8-jgmk7" event={"ID":"3af8d005-c0f8-450e-9735-f064dd6101e9","Type":"ContainerStarted","Data":"b2a88a68a169173cc425df1e47e4e4080b84274a2494cd84c2ba1b3be398bd8a"} Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.720568 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" 
event={"ID":"bd325177-e3fc-476e-b59f-363f1bc2fe0a","Type":"ContainerStarted","Data":"2610b5556eadcdfc4dedefbe0c16ad9a5bb3e0f05951cccc40fa36703b533731"} Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.721297 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" event={"ID":"a69b0d1e-d485-4424-a5a0-607e997bbaf6","Type":"ContainerStarted","Data":"f13e8363718635854e0bda6dd24ba015cf192098494f847beccd054cef810e87"} Dec 03 07:38:58 crc kubenswrapper[4612]: I1203 07:38:58.745569 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-77b44546b8-jgmk7" podStartSLOduration=1.745538511 podStartE2EDuration="1.745538511s" podCreationTimestamp="2025-12-03 07:38:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:38:58.740661229 +0000 UTC m=+701.914018649" watchObservedRunningTime="2025-12-03 07:38:58.745538511 +0000 UTC m=+701.918895911" Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.736473 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" event={"ID":"0eea55b7-ecfe-4fc9-bff2-061da172743a","Type":"ContainerStarted","Data":"49a85becb689e1f5f50d874b60baa0bc3bb28126c9286b970687797f86ba8597"} Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.738297 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" event={"ID":"a69b0d1e-d485-4424-a5a0-607e997bbaf6","Type":"ContainerStarted","Data":"086c5be291285976bab9c930092df10d1e23d0cb62b8f1a375b8d055c7763fa5"} Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.738335 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.740027 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4mtzb" event={"ID":"3dfc52ec-c1e6-4162-bccf-6fb5a855212f","Type":"ContainerStarted","Data":"045ce8fd5e9bb6c3ca8c726b5144e02e1cc54cee3dd7dc0f1b777689c9b27adb"} Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.740417 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.755439 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" podStartSLOduration=3.006466655 podStartE2EDuration="4.755423155s" podCreationTimestamp="2025-12-03 07:38:56 +0000 UTC" firstStartedPulling="2025-12-03 07:38:58.002509127 +0000 UTC m=+701.175866527" lastFinishedPulling="2025-12-03 07:38:59.751465627 +0000 UTC m=+702.924823027" observedRunningTime="2025-12-03 07:39:00.754558273 +0000 UTC m=+703.927915673" watchObservedRunningTime="2025-12-03 07:39:00.755423155 +0000 UTC m=+703.928780555" Dec 03 07:39:00 crc kubenswrapper[4612]: I1203 07:39:00.778733 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-4mtzb" podStartSLOduration=2.10824179 podStartE2EDuration="4.778715299s" podCreationTimestamp="2025-12-03 07:38:56 +0000 UTC" firstStartedPulling="2025-12-03 07:38:57.079588643 +0000 UTC m=+700.252946043" lastFinishedPulling="2025-12-03 07:38:59.750062152 +0000 UTC m=+702.923419552" observedRunningTime="2025-12-03 07:39:00.777247142 +0000 UTC 
m=+703.950604542" watchObservedRunningTime="2025-12-03 07:39:00.778715299 +0000 UTC m=+703.952072699" Dec 03 07:39:01 crc kubenswrapper[4612]: I1203 07:39:01.756812 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" event={"ID":"bd325177-e3fc-476e-b59f-363f1bc2fe0a","Type":"ContainerStarted","Data":"22df508f8bd2ec14fdc63237eb6631410f2dac7b4e70ba638f96400b7cb0172c"} Dec 03 07:39:01 crc kubenswrapper[4612]: I1203 07:39:01.772087 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qgl65" podStartSLOduration=3.01790914 podStartE2EDuration="5.772070149s" podCreationTimestamp="2025-12-03 07:38:56 +0000 UTC" firstStartedPulling="2025-12-03 07:38:58.188870271 +0000 UTC m=+701.362227671" lastFinishedPulling="2025-12-03 07:39:00.94303129 +0000 UTC m=+704.116388680" observedRunningTime="2025-12-03 07:39:01.771002532 +0000 UTC m=+704.944359942" watchObservedRunningTime="2025-12-03 07:39:01.772070149 +0000 UTC m=+704.945427559" Dec 03 07:39:02 crc kubenswrapper[4612]: I1203 07:39:02.766087 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" event={"ID":"0eea55b7-ecfe-4fc9-bff2-061da172743a","Type":"ContainerStarted","Data":"6f489f9610a38e9d51575c9cb604c6a0495296e55f3d3ad1cf9da4fc590dc49e"} Dec 03 07:39:02 crc kubenswrapper[4612]: I1203 07:39:02.794407 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-7dp5q" podStartSLOduration=2.122757634 podStartE2EDuration="6.794380506s" podCreationTimestamp="2025-12-03 07:38:56 +0000 UTC" firstStartedPulling="2025-12-03 07:38:57.325682175 +0000 UTC m=+700.499039575" lastFinishedPulling="2025-12-03 07:39:01.997305047 +0000 UTC m=+705.170662447" observedRunningTime="2025-12-03 07:39:02.789707119 +0000 UTC m=+705.963064549" watchObservedRunningTime="2025-12-03 07:39:02.794380506 +0000 UTC m=+705.967737946" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.067890 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-4mtzb" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.489428 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.489478 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.497435 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.806266 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-77b44546b8-jgmk7" Dec 03 07:39:07 crc kubenswrapper[4612]: I1203 07:39:07.880407 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-477tn"] Dec 03 07:39:17 crc kubenswrapper[4612]: I1203 07:39:17.136373 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:39:17 crc kubenswrapper[4612]: I1203 07:39:17.136614 4612 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:39:17 crc kubenswrapper[4612]: I1203 07:39:17.591397 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hnhsj" Dec 03 07:39:30 crc kubenswrapper[4612]: I1203 07:39:30.978716 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72"] Dec 03 07:39:30 crc kubenswrapper[4612]: I1203 07:39:30.980407 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:30 crc kubenswrapper[4612]: I1203 07:39:30.982147 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 07:39:30 crc kubenswrapper[4612]: I1203 07:39:30.987345 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72"] Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.109756 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.109821 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.109877 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp684\" (UniqueName: \"kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.211283 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.211356 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util\") pod 
\"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.211425 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp684\" (UniqueName: \"kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.212605 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.212652 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.238387 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp684\" (UniqueName: \"kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.298961 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.704436 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72"] Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.952771 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerStarted","Data":"9dcf6b0f92eba5c92340e2967e64215f2034a009c5c2ce5ce4f2f87fa3e10c53"} Dec 03 07:39:31 crc kubenswrapper[4612]: I1203 07:39:31.953150 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerStarted","Data":"a0918cdc1aaf3208c039491627c5a0a774649c1849bf233567aa678904bea2e7"} Dec 03 07:39:32 crc kubenswrapper[4612]: I1203 07:39:32.925554 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-477tn" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" containerID="cri-o://14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486" gracePeriod=15 Dec 03 07:39:32 crc kubenswrapper[4612]: I1203 07:39:32.959758 4612 generic.go:334] "Generic (PLEG): container finished" podID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerID="9dcf6b0f92eba5c92340e2967e64215f2034a009c5c2ce5ce4f2f87fa3e10c53" exitCode=0 Dec 03 07:39:32 crc kubenswrapper[4612]: I1203 07:39:32.959799 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerDied","Data":"9dcf6b0f92eba5c92340e2967e64215f2034a009c5c2ce5ce4f2f87fa3e10c53"} Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.289225 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-477tn_6bae73b0-37ed-4404-935d-c9afce883fd2/console/0.log" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.289523 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445576 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445668 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445733 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445763 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445830 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445868 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.445927 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm9hg\" (UniqueName: \"kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg\") pod \"6bae73b0-37ed-4404-935d-c9afce883fd2\" (UID: \"6bae73b0-37ed-4404-935d-c9afce883fd2\") " Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.447725 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.447761 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config" (OuterVolumeSpecName: "console-config") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.448063 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.449795 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca" (OuterVolumeSpecName: "service-ca") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.452633 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg" (OuterVolumeSpecName: "kube-api-access-jm9hg") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "kube-api-access-jm9hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.453034 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.453258 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "6bae73b0-37ed-4404-935d-c9afce883fd2" (UID: "6bae73b0-37ed-4404-935d-c9afce883fd2"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.547976 4612 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548226 4612 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548519 4612 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548581 4612 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548639 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm9hg\" (UniqueName: \"kubernetes.io/projected/6bae73b0-37ed-4404-935d-c9afce883fd2-kube-api-access-jm9hg\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548699 4612 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6bae73b0-37ed-4404-935d-c9afce883fd2-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.548749 4612 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6bae73b0-37ed-4404-935d-c9afce883fd2-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966666 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-477tn_6bae73b0-37ed-4404-935d-c9afce883fd2/console/0.log" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966718 4612 generic.go:334] "Generic (PLEG): container finished" podID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerID="14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486" exitCode=2 Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966749 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-477tn" event={"ID":"6bae73b0-37ed-4404-935d-c9afce883fd2","Type":"ContainerDied","Data":"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486"} Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966766 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-477tn" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966773 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-477tn" event={"ID":"6bae73b0-37ed-4404-935d-c9afce883fd2","Type":"ContainerDied","Data":"445cda6a5ab58a8e694b61f2c55ee3339701c6da52f4b13aaf91892faa11c399"} Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.966788 4612 scope.go:117] "RemoveContainer" containerID="14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.982718 4612 scope.go:117] "RemoveContainer" containerID="14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486" Dec 03 07:39:33 crc kubenswrapper[4612]: E1203 07:39:33.983099 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486\": container with ID starting with 14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486 not found: ID does not exist" containerID="14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.983135 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486"} err="failed to get container status \"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486\": rpc error: code = NotFound desc = could not find container \"14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486\": container with ID starting with 14619e93d6eba85bfbfad82d6a66621fb3b78c99c6319bc3e44a5106a7e5b486 not found: ID does not exist" Dec 03 07:39:33 crc kubenswrapper[4612]: I1203 07:39:33.997978 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-477tn"] Dec 03 07:39:34 crc kubenswrapper[4612]: I1203 07:39:34.002962 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-477tn"] Dec 03 07:39:34 crc kubenswrapper[4612]: I1203 07:39:34.975472 4612 generic.go:334] "Generic (PLEG): container finished" podID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerID="ca9bcdaf607539daf41505551fe0cbfd4e9bfe273c8cde50e82486bc405980c5" exitCode=0 Dec 03 07:39:34 crc kubenswrapper[4612]: I1203 07:39:34.975574 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerDied","Data":"ca9bcdaf607539daf41505551fe0cbfd4e9bfe273c8cde50e82486bc405980c5"} Dec 03 07:39:35 crc kubenswrapper[4612]: I1203 07:39:35.100511 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" path="/var/lib/kubelet/pods/6bae73b0-37ed-4404-935d-c9afce883fd2/volumes" Dec 03 07:39:35 crc kubenswrapper[4612]: I1203 07:39:35.983855 4612 generic.go:334] "Generic (PLEG): container finished" podID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerID="1040852112b71cf99f9957a592c3a0bd1afc1220c20b469fd3742218b0779f81" exitCode=0 Dec 03 07:39:35 crc kubenswrapper[4612]: I1203 07:39:35.984050 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" 
event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerDied","Data":"1040852112b71cf99f9957a592c3a0bd1afc1220c20b469fd3742218b0779f81"} Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.252793 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.396333 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp684\" (UniqueName: \"kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684\") pod \"2fe33ad3-592c-48e5-83ec-a919da42fd49\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.396791 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util\") pod \"2fe33ad3-592c-48e5-83ec-a919da42fd49\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.398076 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle\") pod \"2fe33ad3-592c-48e5-83ec-a919da42fd49\" (UID: \"2fe33ad3-592c-48e5-83ec-a919da42fd49\") " Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.399449 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle" (OuterVolumeSpecName: "bundle") pod "2fe33ad3-592c-48e5-83ec-a919da42fd49" (UID: "2fe33ad3-592c-48e5-83ec-a919da42fd49"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.405337 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684" (OuterVolumeSpecName: "kube-api-access-sp684") pod "2fe33ad3-592c-48e5-83ec-a919da42fd49" (UID: "2fe33ad3-592c-48e5-83ec-a919da42fd49"). InnerVolumeSpecName "kube-api-access-sp684". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.409103 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util" (OuterVolumeSpecName: "util") pod "2fe33ad3-592c-48e5-83ec-a919da42fd49" (UID: "2fe33ad3-592c-48e5-83ec-a919da42fd49"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.500364 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp684\" (UniqueName: \"kubernetes.io/projected/2fe33ad3-592c-48e5-83ec-a919da42fd49-kube-api-access-sp684\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.500398 4612 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-util\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:37 crc kubenswrapper[4612]: I1203 07:39:37.500411 4612 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2fe33ad3-592c-48e5-83ec-a919da42fd49-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:39:38 crc kubenswrapper[4612]: I1203 07:39:38.001498 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" event={"ID":"2fe33ad3-592c-48e5-83ec-a919da42fd49","Type":"ContainerDied","Data":"a0918cdc1aaf3208c039491627c5a0a774649c1849bf233567aa678904bea2e7"} Dec 03 07:39:38 crc kubenswrapper[4612]: I1203 07:39:38.001546 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0918cdc1aaf3208c039491627c5a0a774649c1849bf233567aa678904bea2e7" Dec 03 07:39:38 crc kubenswrapper[4612]: I1203 07:39:38.001617 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.050856 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn"] Dec 03 07:39:46 crc kubenswrapper[4612]: E1203 07:39:46.051759 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="pull" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051771 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="pull" Dec 03 07:39:46 crc kubenswrapper[4612]: E1203 07:39:46.051788 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="util" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051795 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="util" Dec 03 07:39:46 crc kubenswrapper[4612]: E1203 07:39:46.051805 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="extract" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051811 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="extract" Dec 03 07:39:46 crc kubenswrapper[4612]: E1203 07:39:46.051825 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051830 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051916 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bae73b0-37ed-4404-935d-c9afce883fd2" containerName="console" Dec 
03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.051926 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe33ad3-592c-48e5-83ec-a919da42fd49" containerName="extract" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.052290 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.059878 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.060129 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.060152 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-69dx4" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.060227 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.060457 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.081219 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn"] Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.209811 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgtpm\" (UniqueName: \"kubernetes.io/projected/bd467808-b93d-47ee-bdf3-8e6f29bf3506-kube-api-access-vgtpm\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.209871 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-apiservice-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.209911 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-webhook-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.278654 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc"] Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.279441 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.282881 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.283068 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.283178 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-n5z52" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.299836 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc"] Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.311066 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgtpm\" (UniqueName: \"kubernetes.io/projected/bd467808-b93d-47ee-bdf3-8e6f29bf3506-kube-api-access-vgtpm\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.311118 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-apiservice-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.311159 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-webhook-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.316742 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-webhook-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.316751 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bd467808-b93d-47ee-bdf3-8e6f29bf3506-apiservice-cert\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.343814 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgtpm\" (UniqueName: \"kubernetes.io/projected/bd467808-b93d-47ee-bdf3-8e6f29bf3506-kube-api-access-vgtpm\") pod \"metallb-operator-controller-manager-67fc746f5d-s9gvn\" (UID: \"bd467808-b93d-47ee-bdf3-8e6f29bf3506\") " pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.376427 4612 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.418661 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp6nh\" (UniqueName: \"kubernetes.io/projected/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-kube-api-access-tp6nh\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.418705 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-webhook-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.418734 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-apiservice-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.521214 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp6nh\" (UniqueName: \"kubernetes.io/projected/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-kube-api-access-tp6nh\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.521488 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-webhook-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.521514 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-apiservice-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.529620 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-apiservice-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.538518 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-webhook-cert\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: 
\"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.546913 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp6nh\" (UniqueName: \"kubernetes.io/projected/ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d-kube-api-access-tp6nh\") pod \"metallb-operator-webhook-server-6b85868c59-dznbc\" (UID: \"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d\") " pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.595190 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:46 crc kubenswrapper[4612]: I1203 07:39:46.613312 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn"] Dec 03 07:39:46 crc kubenswrapper[4612]: W1203 07:39:46.625248 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd467808_b93d_47ee_bdf3_8e6f29bf3506.slice/crio-17a7b499c6038671779bf58d8974bf767da4bb021e078a937a80e2951c72e5fc WatchSource:0}: Error finding container 17a7b499c6038671779bf58d8974bf767da4bb021e078a937a80e2951c72e5fc: Status 404 returned error can't find the container with id 17a7b499c6038671779bf58d8974bf767da4bb021e078a937a80e2951c72e5fc Dec 03 07:39:47 crc kubenswrapper[4612]: I1203 07:39:47.061088 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" event={"ID":"bd467808-b93d-47ee-bdf3-8e6f29bf3506","Type":"ContainerStarted","Data":"17a7b499c6038671779bf58d8974bf767da4bb021e078a937a80e2951c72e5fc"} Dec 03 07:39:47 crc kubenswrapper[4612]: I1203 07:39:47.125893 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc"] Dec 03 07:39:47 crc kubenswrapper[4612]: I1203 07:39:47.135521 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:39:47 crc kubenswrapper[4612]: I1203 07:39:47.135564 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:39:47 crc kubenswrapper[4612]: W1203 07:39:47.135962 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac4cb1e7_b3c7_4b39_a038_49071cb6ac2d.slice/crio-e68dd52f1a72f7a6a3fa450196da080a089ff8ca5a4ccf2fc98d4561a957033c WatchSource:0}: Error finding container e68dd52f1a72f7a6a3fa450196da080a089ff8ca5a4ccf2fc98d4561a957033c: Status 404 returned error can't find the container with id e68dd52f1a72f7a6a3fa450196da080a089ff8ca5a4ccf2fc98d4561a957033c Dec 03 07:39:48 crc kubenswrapper[4612]: I1203 07:39:48.067576 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" 
event={"ID":"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d","Type":"ContainerStarted","Data":"e68dd52f1a72f7a6a3fa450196da080a089ff8ca5a4ccf2fc98d4561a957033c"} Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.552128 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" event={"ID":"bd467808-b93d-47ee-bdf3-8e6f29bf3506","Type":"ContainerStarted","Data":"a13d62dd519fd892456bae95916e7d8fa5a75c29bc3443b787628d07443f6d5a"} Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.552424 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.554984 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" event={"ID":"ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d","Type":"ContainerStarted","Data":"62784e8eb0f2f520e92247d966d1fccd38649cb282c1bf2d2f70e722893dcc72"} Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.555154 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.573734 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" podStartSLOduration=1.280200737 podStartE2EDuration="8.57371279s" podCreationTimestamp="2025-12-03 07:39:46 +0000 UTC" firstStartedPulling="2025-12-03 07:39:46.630465445 +0000 UTC m=+749.803822845" lastFinishedPulling="2025-12-03 07:39:53.923977498 +0000 UTC m=+757.097334898" observedRunningTime="2025-12-03 07:39:54.571662639 +0000 UTC m=+757.745020069" watchObservedRunningTime="2025-12-03 07:39:54.57371279 +0000 UTC m=+757.747070230" Dec 03 07:39:54 crc kubenswrapper[4612]: I1203 07:39:54.600602 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" podStartSLOduration=1.800274513 podStartE2EDuration="8.600583313s" podCreationTimestamp="2025-12-03 07:39:46 +0000 UTC" firstStartedPulling="2025-12-03 07:39:47.138822818 +0000 UTC m=+750.312180218" lastFinishedPulling="2025-12-03 07:39:53.939131618 +0000 UTC m=+757.112489018" observedRunningTime="2025-12-03 07:39:54.595547307 +0000 UTC m=+757.768904777" watchObservedRunningTime="2025-12-03 07:39:54.600583313 +0000 UTC m=+757.773940713" Dec 03 07:40:00 crc kubenswrapper[4612]: I1203 07:40:00.504460 4612 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 07:40:06 crc kubenswrapper[4612]: I1203 07:40:06.610342 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6b85868c59-dznbc" Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.135748 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.136270 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.136318 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.136855 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.136913 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235" gracePeriod=600 Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.688018 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235" exitCode=0 Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.688073 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235"} Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.688530 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e"} Dec 03 07:40:17 crc kubenswrapper[4612]: I1203 07:40:17.688570 4612 scope.go:117] "RemoveContainer" containerID="a6d9c29b43953a091e80d49e0c054aba4416e3baeed2e1789edf4b26a4a652ee" Dec 03 07:40:26 crc kubenswrapper[4612]: I1203 07:40:26.379688 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-67fc746f5d-s9gvn" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.145303 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-djbsb"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.148914 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.149641 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.150737 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.152317 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-9lbmn" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.152454 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.152516 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.155858 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.164475 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.268137 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-6wm95"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.271075 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.275251 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-br8ms" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.275478 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.276212 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.277254 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285460 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285538 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rppgp\" (UniqueName: \"kubernetes.io/projected/fd2ac083-0876-4383-ba05-5493cd25e480-kube-api-access-rppgp\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-startup\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285637 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-reloader\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc 
kubenswrapper[4612]: I1203 07:40:27.285693 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-conf\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285730 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-sockets\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285785 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd2ac083-0876-4383-ba05-5493cd25e480-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285913 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5knz6\" (UniqueName: \"kubernetes.io/projected/77aa8be3-2081-4ed8-8507-3f466dbef21c-kube-api-access-5knz6\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.285972 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics-certs\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.321670 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-gnqmc"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.322488 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.331829 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.343900 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-gnqmc"] Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388109 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5knz6\" (UniqueName: \"kubernetes.io/projected/77aa8be3-2081-4ed8-8507-3f466dbef21c-kube-api-access-5knz6\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388192 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics-certs\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388233 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388260 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-startup\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388377 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rppgp\" (UniqueName: \"kubernetes.io/projected/fd2ac083-0876-4383-ba05-5493cd25e480-kube-api-access-rppgp\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388431 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-reloader\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388458 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/410e9076-5ebb-45a7-880a-77bffe01911b-metallb-excludel2\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388492 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-metrics-certs\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388517 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-conf\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388540 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-sockets\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388580 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd2ac083-0876-4383-ba05-5493cd25e480-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388614 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388640 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rc6q\" (UniqueName: \"kubernetes.io/projected/410e9076-5ebb-45a7-880a-77bffe01911b-kube-api-access-4rc6q\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.388700 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.388863 4612 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.388920 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd2ac083-0876-4383-ba05-5493cd25e480-cert podName:fd2ac083-0876-4383-ba05-5493cd25e480 nodeName:}" failed. No retries permitted until 2025-12-03 07:40:27.888900586 +0000 UTC m=+791.062257986 (durationBeforeRetry 500ms). 
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.389001 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-reloader\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.389399 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-sockets\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.389472 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-startup\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.389600 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/77aa8be3-2081-4ed8-8507-3f466dbef21c-frr-conf\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.404677 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77aa8be3-2081-4ed8-8507-3f466dbef21c-metrics-certs\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.411784 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rppgp\" (UniqueName: \"kubernetes.io/projected/fd2ac083-0876-4383-ba05-5493cd25e480-kube-api-access-rppgp\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.414015 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5knz6\" (UniqueName: \"kubernetes.io/projected/77aa8be3-2081-4ed8-8507-3f466dbef21c-kube-api-access-5knz6\") pod \"frr-k8s-djbsb\" (UID: \"77aa8be3-2081-4ed8-8507-3f466dbef21c\") " pod="metallb-system/frr-k8s-djbsb"
Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.473402 4612 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489777 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-metrics-certs\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489844 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-cert\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489901 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489927 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxwdv\" (UniqueName: \"kubernetes.io/projected/a982bc05-54be-4a0b-8e04-1e566601060d-kube-api-access-wxwdv\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489966 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rc6q\" (UniqueName: \"kubernetes.io/projected/410e9076-5ebb-45a7-880a-77bffe01911b-kube-api-access-4rc6q\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.489999 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-metrics-certs\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.490072 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/410e9076-5ebb-45a7-880a-77bffe01911b-metallb-excludel2\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.490908 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/410e9076-5ebb-45a7-880a-77bffe01911b-metallb-excludel2\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.491136 4612 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.491193 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist podName:410e9076-5ebb-45a7-880a-77bffe01911b nodeName:}" failed. 
No retries permitted until 2025-12-03 07:40:27.99117765 +0000 UTC m=+791.164535050 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist") pod "speaker-6wm95" (UID: "410e9076-5ebb-45a7-880a-77bffe01911b") : secret "metallb-memberlist" not found Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.493593 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-metrics-certs\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.513248 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rc6q\" (UniqueName: \"kubernetes.io/projected/410e9076-5ebb-45a7-880a-77bffe01911b-kube-api-access-4rc6q\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.591558 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-cert\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.591651 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxwdv\" (UniqueName: \"kubernetes.io/projected/a982bc05-54be-4a0b-8e04-1e566601060d-kube-api-access-wxwdv\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.591682 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-metrics-certs\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.596224 4612 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.597613 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-metrics-certs\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.611921 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a982bc05-54be-4a0b-8e04-1e566601060d-cert\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.614457 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxwdv\" (UniqueName: \"kubernetes.io/projected/a982bc05-54be-4a0b-8e04-1e566601060d-kube-api-access-wxwdv\") pod \"controller-f8648f98b-gnqmc\" (UID: \"a982bc05-54be-4a0b-8e04-1e566601060d\") " pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 
crc kubenswrapper[4612]: I1203 07:40:27.639763 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.763891 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"27ca185efb9842a06804596cc15ba6914cfc307ef2f371d9ee3737670e467f1a"} Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.895549 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd2ac083-0876-4383-ba05-5493cd25e480-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.900334 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd2ac083-0876-4383-ba05-5493cd25e480-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-5r284\" (UID: \"fd2ac083-0876-4383-ba05-5493cd25e480\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:27 crc kubenswrapper[4612]: I1203 07:40:27.997028 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.997217 4612 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 07:40:27 crc kubenswrapper[4612]: E1203 07:40:27.997266 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist podName:410e9076-5ebb-45a7-880a-77bffe01911b nodeName:}" failed. No retries permitted until 2025-12-03 07:40:28.997251671 +0000 UTC m=+792.170609071 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist") pod "speaker-6wm95" (UID: "410e9076-5ebb-45a7-880a-77bffe01911b") : secret "metallb-memberlist" not found Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.084368 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.103612 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-gnqmc"] Dec 03 07:40:28 crc kubenswrapper[4612]: W1203 07:40:28.114726 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda982bc05_54be_4a0b_8e04_1e566601060d.slice/crio-13c62da332bd6d77446dc88af989f0fa43903f88e223756c2cb3aaac03d142d1 WatchSource:0}: Error finding container 13c62da332bd6d77446dc88af989f0fa43903f88e223756c2cb3aaac03d142d1: Status 404 returned error can't find the container with id 13c62da332bd6d77446dc88af989f0fa43903f88e223756c2cb3aaac03d142d1 Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.286420 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284"] Dec 03 07:40:28 crc kubenswrapper[4612]: W1203 07:40:28.290215 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd2ac083_0876_4383_ba05_5493cd25e480.slice/crio-3e6d6e05ce4e47688b24a64142a46a3ddf1a18e1c04bf58b955573070d4fa08b WatchSource:0}: Error finding container 3e6d6e05ce4e47688b24a64142a46a3ddf1a18e1c04bf58b955573070d4fa08b: Status 404 returned error can't find the container with id 3e6d6e05ce4e47688b24a64142a46a3ddf1a18e1c04bf58b955573070d4fa08b Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.770431 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" event={"ID":"fd2ac083-0876-4383-ba05-5493cd25e480","Type":"ContainerStarted","Data":"3e6d6e05ce4e47688b24a64142a46a3ddf1a18e1c04bf58b955573070d4fa08b"} Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.771704 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-gnqmc" event={"ID":"a982bc05-54be-4a0b-8e04-1e566601060d","Type":"ContainerStarted","Data":"d335484028044affac90eebf0397810b679b5fc4447514ad92af84345426678f"} Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.771733 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-gnqmc" event={"ID":"a982bc05-54be-4a0b-8e04-1e566601060d","Type":"ContainerStarted","Data":"af07bd65701ab5209d4e810f44ab320aae4fe43b9b550da0d143589ca1f5968f"} Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.771748 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-gnqmc" event={"ID":"a982bc05-54be-4a0b-8e04-1e566601060d","Type":"ContainerStarted","Data":"13c62da332bd6d77446dc88af989f0fa43903f88e223756c2cb3aaac03d142d1"} Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.772736 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:28 crc kubenswrapper[4612]: I1203 07:40:28.797046 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-gnqmc" podStartSLOduration=1.797023608 podStartE2EDuration="1.797023608s" podCreationTimestamp="2025-12-03 07:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:40:28.793230103 +0000 UTC m=+791.966587503" watchObservedRunningTime="2025-12-03 07:40:28.797023608 +0000 UTC m=+791.970381008" Dec 03 
07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.013989 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.031657 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/410e9076-5ebb-45a7-880a-77bffe01911b-memberlist\") pod \"speaker-6wm95\" (UID: \"410e9076-5ebb-45a7-880a-77bffe01911b\") " pod="metallb-system/speaker-6wm95" Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.089111 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-6wm95" Dec 03 07:40:29 crc kubenswrapper[4612]: W1203 07:40:29.147341 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod410e9076_5ebb_45a7_880a_77bffe01911b.slice/crio-f22f01453966a95b28a1381cbbcbcc7a1c3e3d8ffd1ff823f89c372d1a72c817 WatchSource:0}: Error finding container f22f01453966a95b28a1381cbbcbcc7a1c3e3d8ffd1ff823f89c372d1a72c817: Status 404 returned error can't find the container with id f22f01453966a95b28a1381cbbcbcc7a1c3e3d8ffd1ff823f89c372d1a72c817 Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.792407 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wm95" event={"ID":"410e9076-5ebb-45a7-880a-77bffe01911b","Type":"ContainerStarted","Data":"51025026ab258f01d2efb4a17d0d251dc3220afe1fd6d683cf16b5e870825530"} Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.792804 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wm95" event={"ID":"410e9076-5ebb-45a7-880a-77bffe01911b","Type":"ContainerStarted","Data":"11e0de910e4e7588c12a84ac7c9ed3bc1c017c5042990360bb1b584e64e5a43d"} Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.792823 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wm95" event={"ID":"410e9076-5ebb-45a7-880a-77bffe01911b","Type":"ContainerStarted","Data":"f22f01453966a95b28a1381cbbcbcc7a1c3e3d8ffd1ff823f89c372d1a72c817"} Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.793807 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-6wm95" Dec 03 07:40:29 crc kubenswrapper[4612]: I1203 07:40:29.818625 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-6wm95" podStartSLOduration=2.818609926 podStartE2EDuration="2.818609926s" podCreationTimestamp="2025-12-03 07:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:40:29.813883818 +0000 UTC m=+792.987241218" watchObservedRunningTime="2025-12-03 07:40:29.818609926 +0000 UTC m=+792.991967316" Dec 03 07:40:35 crc kubenswrapper[4612]: I1203 07:40:35.832264 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" event={"ID":"fd2ac083-0876-4383-ba05-5493cd25e480","Type":"ContainerStarted","Data":"77f0ac104fff177d5f23da9990f7dde1c036947c68e2400ca52b770e151609e5"} Dec 03 07:40:35 crc kubenswrapper[4612]: I1203 07:40:35.832807 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" Dec 03 07:40:35 crc kubenswrapper[4612]: I1203 07:40:35.834799 4612 generic.go:334] "Generic (PLEG): container finished" podID="77aa8be3-2081-4ed8-8507-3f466dbef21c" containerID="fe17124917173a044fe39fefbce1c5eaeda32b3b6109e880a40b5d09bdf956f3" exitCode=0 Dec 03 07:40:35 crc kubenswrapper[4612]: I1203 07:40:35.834842 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerDied","Data":"fe17124917173a044fe39fefbce1c5eaeda32b3b6109e880a40b5d09bdf956f3"} Dec 03 07:40:35 crc kubenswrapper[4612]: I1203 07:40:35.858308 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284" podStartSLOduration=1.524711936 podStartE2EDuration="8.858289346s" podCreationTimestamp="2025-12-03 07:40:27 +0000 UTC" firstStartedPulling="2025-12-03 07:40:28.29187671 +0000 UTC m=+791.465234110" lastFinishedPulling="2025-12-03 07:40:35.62545412 +0000 UTC m=+798.798811520" observedRunningTime="2025-12-03 07:40:35.854816529 +0000 UTC m=+799.028173969" watchObservedRunningTime="2025-12-03 07:40:35.858289346 +0000 UTC m=+799.031646746" Dec 03 07:40:36 crc kubenswrapper[4612]: I1203 07:40:36.845073 4612 generic.go:334] "Generic (PLEG): container finished" podID="77aa8be3-2081-4ed8-8507-3f466dbef21c" containerID="004d590c9d653c5f8bd70b775bc14e4427152791efc3d9cbe5d4789d2bf1d08f" exitCode=0 Dec 03 07:40:36 crc kubenswrapper[4612]: I1203 07:40:36.845176 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerDied","Data":"004d590c9d653c5f8bd70b775bc14e4427152791efc3d9cbe5d4789d2bf1d08f"} Dec 03 07:40:37 crc kubenswrapper[4612]: I1203 07:40:37.860009 4612 generic.go:334] "Generic (PLEG): container finished" podID="77aa8be3-2081-4ed8-8507-3f466dbef21c" containerID="328eb9d1e00eb17c607ec6e872bcd83391b5756521b859210f7d693faa4d6436" exitCode=0 Dec 03 07:40:37 crc kubenswrapper[4612]: I1203 07:40:37.860360 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerDied","Data":"328eb9d1e00eb17c607ec6e872bcd83391b5756521b859210f7d693faa4d6436"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870100 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"77e300d2d00bd116f92c00d1d7385360c7b4e9f98572ce103f66e6cf51ba55a1"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870635 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"69490eaec17a7a12fcb663779656786a2ec096881044dcaf1dbd96c89c3cbcf1"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870648 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"4b46c7b4b0148672c280b431220153c7ffdc69a9374fb1fb43736456f703187e"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870666 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870676 4612 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"244bc232bb8aece6b318828c1c08dbf640c2542c5335e9ad1aa965ce55e38c3e"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870687 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"10903f4e9b47710bcbb4d5dfa46cdcbf47439836d3a244ad5c96c0ccef89959b"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.870697 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-djbsb" event={"ID":"77aa8be3-2081-4ed8-8507-3f466dbef21c","Type":"ContainerStarted","Data":"26e3e05346c570dce1cdab26592ba25f54b5f203618c3ae814f6e29cd21e1ed1"} Dec 03 07:40:38 crc kubenswrapper[4612]: I1203 07:40:38.891839 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-djbsb" podStartSLOduration=3.926625071 podStartE2EDuration="11.891821318s" podCreationTimestamp="2025-12-03 07:40:27 +0000 UTC" firstStartedPulling="2025-12-03 07:40:27.641446923 +0000 UTC m=+790.814804323" lastFinishedPulling="2025-12-03 07:40:35.60664317 +0000 UTC m=+798.780000570" observedRunningTime="2025-12-03 07:40:38.890834634 +0000 UTC m=+802.064192044" watchObservedRunningTime="2025-12-03 07:40:38.891821318 +0000 UTC m=+802.065178728" Dec 03 07:40:39 crc kubenswrapper[4612]: I1203 07:40:39.100355 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-6wm95" Dec 03 07:40:41 crc kubenswrapper[4612]: I1203 07:40:41.997066 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"] Dec 03 07:40:41 crc kubenswrapper[4612]: I1203 07:40:41.998184 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.000762 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.000880 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-klgqs" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.001000 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.018600 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"] Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.109452 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9tvw\" (UniqueName: \"kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw\") pod \"openstack-operator-index-7d7xz\" (UID: \"8e50dc6c-1b93-494a-9b06-f244b2ce2a63\") " pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.211349 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9tvw\" (UniqueName: \"kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw\") pod \"openstack-operator-index-7d7xz\" (UID: \"8e50dc6c-1b93-494a-9b06-f244b2ce2a63\") " pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.236691 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9tvw\" (UniqueName: \"kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw\") pod \"openstack-operator-index-7d7xz\" (UID: \"8e50dc6c-1b93-494a-9b06-f244b2ce2a63\") " pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.322092 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.474355 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.522900 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.715007 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"] Dec 03 07:40:42 crc kubenswrapper[4612]: I1203 07:40:42.892286 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7d7xz" event={"ID":"8e50dc6c-1b93-494a-9b06-f244b2ce2a63","Type":"ContainerStarted","Data":"65973338e671829c29796bbe5ea6a2b4f857d09c83b27a1f1fc145a3350de8a6"} Dec 03 07:40:45 crc kubenswrapper[4612]: I1203 07:40:45.384216 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"] Dec 03 07:40:45 crc kubenswrapper[4612]: I1203 07:40:45.990410 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-vf48w"] Dec 03 07:40:45 crc kubenswrapper[4612]: I1203 07:40:45.991624 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vf48w" Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.014050 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vf48w"] Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.162966 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g646b\" (UniqueName: \"kubernetes.io/projected/5c654dcd-f507-4d31-b3d7-7230cc7cb086-kube-api-access-g646b\") pod \"openstack-operator-index-vf48w\" (UID: \"5c654dcd-f507-4d31-b3d7-7230cc7cb086\") " pod="openstack-operators/openstack-operator-index-vf48w" Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.264080 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g646b\" (UniqueName: \"kubernetes.io/projected/5c654dcd-f507-4d31-b3d7-7230cc7cb086-kube-api-access-g646b\") pod \"openstack-operator-index-vf48w\" (UID: \"5c654dcd-f507-4d31-b3d7-7230cc7cb086\") " pod="openstack-operators/openstack-operator-index-vf48w" Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.283026 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g646b\" (UniqueName: \"kubernetes.io/projected/5c654dcd-f507-4d31-b3d7-7230cc7cb086-kube-api-access-g646b\") pod \"openstack-operator-index-vf48w\" (UID: \"5c654dcd-f507-4d31-b3d7-7230cc7cb086\") " pod="openstack-operators/openstack-operator-index-vf48w" Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.312117 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-vf48w" Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.741706 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vf48w"] Dec 03 07:40:46 crc kubenswrapper[4612]: W1203 07:40:46.743872 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c654dcd_f507_4d31_b3d7_7230cc7cb086.slice/crio-471b309c36a945ec0d56adce4fbae0ee0d0fe304a62afc67f620581bada61b4a WatchSource:0}: Error finding container 471b309c36a945ec0d56adce4fbae0ee0d0fe304a62afc67f620581bada61b4a: Status 404 returned error can't find the container with id 471b309c36a945ec0d56adce4fbae0ee0d0fe304a62afc67f620581bada61b4a Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.920220 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vf48w" event={"ID":"5c654dcd-f507-4d31-b3d7-7230cc7cb086","Type":"ContainerStarted","Data":"471b309c36a945ec0d56adce4fbae0ee0d0fe304a62afc67f620581bada61b4a"} Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.921725 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7d7xz" event={"ID":"8e50dc6c-1b93-494a-9b06-f244b2ce2a63","Type":"ContainerStarted","Data":"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc"} Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.921880 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-7d7xz" podUID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" containerName="registry-server" containerID="cri-o://2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc" gracePeriod=2 Dec 03 07:40:46 crc kubenswrapper[4612]: I1203 07:40:46.941336 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-7d7xz" podStartSLOduration=2.812732686 podStartE2EDuration="5.941318522s" podCreationTimestamp="2025-12-03 07:40:41 +0000 UTC" firstStartedPulling="2025-12-03 07:40:42.721690993 +0000 UTC m=+805.895048393" lastFinishedPulling="2025-12-03 07:40:45.850276829 +0000 UTC m=+809.023634229" observedRunningTime="2025-12-03 07:40:46.936489691 +0000 UTC m=+810.109847081" watchObservedRunningTime="2025-12-03 07:40:46.941318522 +0000 UTC m=+810.114675922" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.285650 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.380494 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9tvw\" (UniqueName: \"kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw\") pod \"8e50dc6c-1b93-494a-9b06-f244b2ce2a63\" (UID: \"8e50dc6c-1b93-494a-9b06-f244b2ce2a63\") " Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.388105 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw" (OuterVolumeSpecName: "kube-api-access-g9tvw") pod "8e50dc6c-1b93-494a-9b06-f244b2ce2a63" (UID: "8e50dc6c-1b93-494a-9b06-f244b2ce2a63"). InnerVolumeSpecName "kube-api-access-g9tvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.476508 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-djbsb" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.495653 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9tvw\" (UniqueName: \"kubernetes.io/projected/8e50dc6c-1b93-494a-9b06-f244b2ce2a63-kube-api-access-g9tvw\") on node \"crc\" DevicePath \"\"" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.644987 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-gnqmc" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.932287 4612 generic.go:334] "Generic (PLEG): container finished" podID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" containerID="2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc" exitCode=0 Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.932346 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7d7xz" event={"ID":"8e50dc6c-1b93-494a-9b06-f244b2ce2a63","Type":"ContainerDied","Data":"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc"} Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.932404 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7d7xz" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.932425 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7d7xz" event={"ID":"8e50dc6c-1b93-494a-9b06-f244b2ce2a63","Type":"ContainerDied","Data":"65973338e671829c29796bbe5ea6a2b4f857d09c83b27a1f1fc145a3350de8a6"} Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.932448 4612 scope.go:117] "RemoveContainer" containerID="2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.935357 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vf48w" event={"ID":"5c654dcd-f507-4d31-b3d7-7230cc7cb086","Type":"ContainerStarted","Data":"417ac7abf7f9b75e0b97ee1a65fba0540ebd72f302c49851757d07e021f74114"} Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.966006 4612 scope.go:117] "RemoveContainer" containerID="2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc" Dec 03 07:40:47 crc kubenswrapper[4612]: E1203 07:40:47.967267 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc\": container with ID starting with 2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc not found: ID does not exist" containerID="2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc" Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.967488 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc"} err="failed to get container status \"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc\": rpc error: code = NotFound desc = could not find container \"2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc\": container with ID starting with 2fadb4b482851c2dd7d2057251b4a24e9057c06645e9a83b9cccc22a4f4443dc not found: ID does not exist" Dec 03 
Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.975609 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-vf48w" podStartSLOduration=2.902049049 podStartE2EDuration="2.975580036s" podCreationTimestamp="2025-12-03 07:40:45 +0000 UTC" firstStartedPulling="2025-12-03 07:40:46.748125626 +0000 UTC m=+809.921483066" lastFinishedPulling="2025-12-03 07:40:46.821656653 +0000 UTC m=+809.995014053" observedRunningTime="2025-12-03 07:40:47.961105464 +0000 UTC m=+811.134462924" watchObservedRunningTime="2025-12-03 07:40:47.975580036 +0000 UTC m=+811.148937476"
Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.992866 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"]
Dec 03 07:40:47 crc kubenswrapper[4612]: I1203 07:40:47.997293 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-7d7xz"]
Dec 03 07:40:48 crc kubenswrapper[4612]: I1203 07:40:48.088114 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-5r284"
Dec 03 07:40:49 crc kubenswrapper[4612]: I1203 07:40:49.100485 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" path="/var/lib/kubelet/pods/8e50dc6c-1b93-494a-9b06-f244b2ce2a63/volumes"
Dec 03 07:40:56 crc kubenswrapper[4612]: I1203 07:40:56.312699 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-vf48w"
Dec 03 07:40:56 crc kubenswrapper[4612]: I1203 07:40:56.314628 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-vf48w"
Dec 03 07:40:56 crc kubenswrapper[4612]: I1203 07:40:56.353086 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-vf48w"
Dec 03 07:40:57 crc kubenswrapper[4612]: I1203 07:40:57.040719 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-vf48w"
Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.238101 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k"]
Dec 03 07:40:59 crc kubenswrapper[4612]: E1203 07:40:59.238669 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" containerName="registry-server"
Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.238686 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" containerName="registry-server"
Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.238857 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e50dc6c-1b93-494a-9b06-f244b2ce2a63" containerName="registry-server"
Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.239976 4612 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.244307 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-hkdhp" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.263294 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k"] Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.366337 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzg4x\" (UniqueName: \"kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.366509 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.366570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.468218 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzg4x\" (UniqueName: \"kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.468745 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.469034 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.469655 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.469688 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.499482 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzg4x\" (UniqueName: \"kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x\") pod \"32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:40:59 crc kubenswrapper[4612]: I1203 07:40:59.577645 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:41:00 crc kubenswrapper[4612]: I1203 07:41:00.067265 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k"] Dec 03 07:41:01 crc kubenswrapper[4612]: I1203 07:41:01.036192 4612 generic.go:334] "Generic (PLEG): container finished" podID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerID="37d003b3f7a80641dd45397a507a0d5a788e7e24970aa985147af7daf2d02b07" exitCode=0 Dec 03 07:41:01 crc kubenswrapper[4612]: I1203 07:41:01.036237 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" event={"ID":"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d","Type":"ContainerDied","Data":"37d003b3f7a80641dd45397a507a0d5a788e7e24970aa985147af7daf2d02b07"} Dec 03 07:41:01 crc kubenswrapper[4612]: I1203 07:41:01.036526 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" event={"ID":"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d","Type":"ContainerStarted","Data":"3eb3e02a633badfb96d1e671ab4cf845210dfb6f438dd3efd81405e5b4903269"} Dec 03 07:41:02 crc kubenswrapper[4612]: I1203 07:41:02.045237 4612 generic.go:334] "Generic (PLEG): container finished" podID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerID="b3f31febad0781aa771dd3bad4aa3afa1a485fc727c53025f0866f0842cd4f2e" exitCode=0 Dec 03 07:41:02 crc kubenswrapper[4612]: I1203 07:41:02.045320 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" event={"ID":"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d","Type":"ContainerDied","Data":"b3f31febad0781aa771dd3bad4aa3afa1a485fc727c53025f0866f0842cd4f2e"} Dec 03 07:41:03 crc kubenswrapper[4612]: I1203 07:41:03.056606 4612 generic.go:334] "Generic (PLEG): container finished" podID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerID="553ea7c04e9840fc0b69f5a3581ed2f7c7103879344b2afac9949e9865c27c2b" exitCode=0 Dec 03 07:41:03 crc kubenswrapper[4612]: I1203 07:41:03.056867 4612 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" event={"ID":"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d","Type":"ContainerDied","Data":"553ea7c04e9840fc0b69f5a3581ed2f7c7103879344b2afac9949e9865c27c2b"} Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.306570 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.443190 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle\") pod \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.443535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzg4x\" (UniqueName: \"kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x\") pod \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.443646 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util\") pod \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\" (UID: \"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d\") " Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.447265 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle" (OuterVolumeSpecName: "bundle") pod "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" (UID: "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.457222 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x" (OuterVolumeSpecName: "kube-api-access-vzg4x") pod "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" (UID: "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d"). InnerVolumeSpecName "kube-api-access-vzg4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.463155 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util" (OuterVolumeSpecName: "util") pod "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" (UID: "acb105f8-b0d1-48ce-81cf-e0f2f1a6202d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.545132 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzg4x\" (UniqueName: \"kubernetes.io/projected/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-kube-api-access-vzg4x\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.545387 4612 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-util\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:04 crc kubenswrapper[4612]: I1203 07:41:04.545473 4612 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb105f8-b0d1-48ce-81cf-e0f2f1a6202d-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:05 crc kubenswrapper[4612]: I1203 07:41:05.074897 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" event={"ID":"acb105f8-b0d1-48ce-81cf-e0f2f1a6202d","Type":"ContainerDied","Data":"3eb3e02a633badfb96d1e671ab4cf845210dfb6f438dd3efd81405e5b4903269"} Dec 03 07:41:05 crc kubenswrapper[4612]: I1203 07:41:05.074990 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3eb3e02a633badfb96d1e671ab4cf845210dfb6f438dd3efd81405e5b4903269" Dec 03 07:41:05 crc kubenswrapper[4612]: I1203 07:41:05.075027 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.406325 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc"] Dec 03 07:41:11 crc kubenswrapper[4612]: E1203 07:41:11.406817 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="util" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.406829 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="util" Dec 03 07:41:11 crc kubenswrapper[4612]: E1203 07:41:11.406846 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="extract" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.406852 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="extract" Dec 03 07:41:11 crc kubenswrapper[4612]: E1203 07:41:11.406860 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="pull" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.406868 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="pull" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.407011 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb105f8-b0d1-48ce-81cf-e0f2f1a6202d" containerName="extract" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.407388 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.409642 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-q5x27" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.449640 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj447\" (UniqueName: \"kubernetes.io/projected/097f95f4-4fc4-43c0-aefd-da8b3c0111f3-kube-api-access-pj447\") pod \"openstack-operator-controller-operator-8f4757f6b-g7ncc\" (UID: \"097f95f4-4fc4-43c0-aefd-da8b3c0111f3\") " pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.500412 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc"] Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.551353 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj447\" (UniqueName: \"kubernetes.io/projected/097f95f4-4fc4-43c0-aefd-da8b3c0111f3-kube-api-access-pj447\") pod \"openstack-operator-controller-operator-8f4757f6b-g7ncc\" (UID: \"097f95f4-4fc4-43c0-aefd-da8b3c0111f3\") " pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.568066 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj447\" (UniqueName: \"kubernetes.io/projected/097f95f4-4fc4-43c0-aefd-da8b3c0111f3-kube-api-access-pj447\") pod \"openstack-operator-controller-operator-8f4757f6b-g7ncc\" (UID: \"097f95f4-4fc4-43c0-aefd-da8b3c0111f3\") " pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:11 crc kubenswrapper[4612]: I1203 07:41:11.723091 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:12 crc kubenswrapper[4612]: I1203 07:41:12.160196 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc"] Dec 03 07:41:12 crc kubenswrapper[4612]: W1203 07:41:12.170109 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod097f95f4_4fc4_43c0_aefd_da8b3c0111f3.slice/crio-8887ba48901f849fb5f7783822cb312b04d21acaeb3748f0960e1afa754fb43d WatchSource:0}: Error finding container 8887ba48901f849fb5f7783822cb312b04d21acaeb3748f0960e1afa754fb43d: Status 404 returned error can't find the container with id 8887ba48901f849fb5f7783822cb312b04d21acaeb3748f0960e1afa754fb43d Dec 03 07:41:13 crc kubenswrapper[4612]: I1203 07:41:13.122911 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" event={"ID":"097f95f4-4fc4-43c0-aefd-da8b3c0111f3","Type":"ContainerStarted","Data":"8887ba48901f849fb5f7783822cb312b04d21acaeb3748f0960e1afa754fb43d"} Dec 03 07:41:17 crc kubenswrapper[4612]: I1203 07:41:17.150048 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" event={"ID":"097f95f4-4fc4-43c0-aefd-da8b3c0111f3","Type":"ContainerStarted","Data":"9f0b202bd5b1310609e227a82143f20ce5ceb2c18fd7728a4bc9461c47dc6aea"} Dec 03 07:41:17 crc kubenswrapper[4612]: I1203 07:41:17.150592 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:17 crc kubenswrapper[4612]: I1203 07:41:17.180126 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" podStartSLOduration=2.046764777 podStartE2EDuration="6.180107011s" podCreationTimestamp="2025-12-03 07:41:11 +0000 UTC" firstStartedPulling="2025-12-03 07:41:12.171634477 +0000 UTC m=+835.344991887" lastFinishedPulling="2025-12-03 07:41:16.304976721 +0000 UTC m=+839.478334121" observedRunningTime="2025-12-03 07:41:17.175039454 +0000 UTC m=+840.348396864" watchObservedRunningTime="2025-12-03 07:41:17.180107011 +0000 UTC m=+840.353464411" Dec 03 07:41:21 crc kubenswrapper[4612]: I1203 07:41:21.725393 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-8f4757f6b-g7ncc" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.120413 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.123498 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.136742 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.148489 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.148541 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb75f\" (UniqueName: \"kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.148692 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.249814 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.249878 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb75f\" (UniqueName: \"kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.249897 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.250412 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.250629 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.268644 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nb75f\" (UniqueName: \"kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f\") pod \"certified-operators-c9h9z\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.442408 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:32 crc kubenswrapper[4612]: I1203 07:41:32.914588 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:33 crc kubenswrapper[4612]: I1203 07:41:33.241929 4612 generic.go:334] "Generic (PLEG): container finished" podID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerID="77a0c9ccda3b8163e3781e9484b6a39debcd141c3e6ab802b9d3c33e8548afc7" exitCode=0 Dec 03 07:41:33 crc kubenswrapper[4612]: I1203 07:41:33.242133 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerDied","Data":"77a0c9ccda3b8163e3781e9484b6a39debcd141c3e6ab802b9d3c33e8548afc7"} Dec 03 07:41:33 crc kubenswrapper[4612]: I1203 07:41:33.242249 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerStarted","Data":"922da0a1f1ca333d5924ef7b7b5a7468a1761e458e3cfb07e9a321545f34e76a"} Dec 03 07:41:34 crc kubenswrapper[4612]: I1203 07:41:34.248821 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerStarted","Data":"47d952ee0092cdde4f1c79153914b06161937347a407d3c75bbe9bdc22c5f7fa"} Dec 03 07:41:35 crc kubenswrapper[4612]: I1203 07:41:35.255675 4612 generic.go:334] "Generic (PLEG): container finished" podID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerID="47d952ee0092cdde4f1c79153914b06161937347a407d3c75bbe9bdc22c5f7fa" exitCode=0 Dec 03 07:41:35 crc kubenswrapper[4612]: I1203 07:41:35.255741 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerDied","Data":"47d952ee0092cdde4f1c79153914b06161937347a407d3c75bbe9bdc22c5f7fa"} Dec 03 07:41:36 crc kubenswrapper[4612]: I1203 07:41:36.262533 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerStarted","Data":"1f0f516d72bbda99e8b05b9e514dafa46ebe8cd3264e9fce750b1c4fe4fea0b5"} Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.515454 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c9h9z" podStartSLOduration=4.13331283 podStartE2EDuration="6.515433651s" podCreationTimestamp="2025-12-03 07:41:32 +0000 UTC" firstStartedPulling="2025-12-03 07:41:33.244594084 +0000 UTC m=+856.417951484" lastFinishedPulling="2025-12-03 07:41:35.626714905 +0000 UTC m=+858.800072305" observedRunningTime="2025-12-03 07:41:36.289933321 +0000 UTC m=+859.463290731" watchObservedRunningTime="2025-12-03 07:41:38.515433651 +0000 UTC m=+861.688791071" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.516305 4612 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.517438 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.519523 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-m82rs" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.524939 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.525787 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.527552 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-69l68" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.535438 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.569882 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.573317 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:41:38 crc kubenswrapper[4612]: W1203 07:41:38.575345 4612 reflector.go:561] object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-twszp": failed to list *v1.Secret: secrets "designate-operator-controller-manager-dockercfg-twszp" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Dec 03 07:41:38 crc kubenswrapper[4612]: E1203 07:41:38.575394 4612 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"designate-operator-controller-manager-dockercfg-twszp\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"designate-operator-controller-manager-dockercfg-twszp\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.582252 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.601919 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.639769 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.639832 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.639875 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.640298 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9g4h\" (UniqueName: \"kubernetes.io/projected/3d3cecf1-2f48-4b22-9350-870d25e786ef-kube-api-access-f9g4h\") pod \"barbican-operator-controller-manager-7d9dfd778-n2rzg\" (UID: \"3d3cecf1-2f48-4b22-9350-870d25e786ef\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.640360 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j442\" (UniqueName: \"kubernetes.io/projected/3c3eb81e-314d-486d-afa4-443f33c54510-kube-api-access-4j442\") pod \"cinder-operator-controller-manager-859b6ccc6-2pxgx\" (UID: \"3c3eb81e-314d-486d-afa4-443f33c54510\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.640801 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"]
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.640909 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.645602 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-hpzm9"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.645910 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-x2pxb"
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.723918 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"]
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.735013 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s"]
Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.736025 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.739821 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-kmhkt" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.741554 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89gx5\" (UniqueName: \"kubernetes.io/projected/9f7c1634-c25d-4fc1-92bd-d95ef05c7868-kube-api-access-89gx5\") pod \"designate-operator-controller-manager-78b4bc895b-4cjnp\" (UID: \"9f7c1634-c25d-4fc1-92bd-d95ef05c7868\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.741594 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9g4h\" (UniqueName: \"kubernetes.io/projected/3d3cecf1-2f48-4b22-9350-870d25e786ef-kube-api-access-f9g4h\") pod \"barbican-operator-controller-manager-7d9dfd778-n2rzg\" (UID: \"3d3cecf1-2f48-4b22-9350-870d25e786ef\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.741629 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j442\" (UniqueName: \"kubernetes.io/projected/3c3eb81e-314d-486d-afa4-443f33c54510-kube-api-access-4j442\") pod \"cinder-operator-controller-manager-859b6ccc6-2pxgx\" (UID: \"3c3eb81e-314d-486d-afa4-443f33c54510\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.741683 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54qmz\" (UniqueName: \"kubernetes.io/projected/d75ef15b-d718-436d-b570-21416a0c4021-kube-api-access-54qmz\") pod \"glance-operator-controller-manager-77987cd8cd-tklpk\" (UID: \"d75ef15b-d718-436d-b570-21416a0c4021\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.768964 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.801383 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.802438 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.814868 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9g4h\" (UniqueName: \"kubernetes.io/projected/3d3cecf1-2f48-4b22-9350-870d25e786ef-kube-api-access-f9g4h\") pod \"barbican-operator-controller-manager-7d9dfd778-n2rzg\" (UID: \"3d3cecf1-2f48-4b22-9350-870d25e786ef\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.819624 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9q27j" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.819845 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.848173 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54qmz\" (UniqueName: \"kubernetes.io/projected/d75ef15b-d718-436d-b570-21416a0c4021-kube-api-access-54qmz\") pod \"glance-operator-controller-manager-77987cd8cd-tklpk\" (UID: \"d75ef15b-d718-436d-b570-21416a0c4021\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.848256 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvpk2\" (UniqueName: \"kubernetes.io/projected/b8715491-d469-4ade-8434-765685a955db-kube-api-access-lvpk2\") pod \"heat-operator-controller-manager-5f64f6f8bb-5rg69\" (UID: \"b8715491-d469-4ade-8434-765685a955db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.848298 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6prc\" (UniqueName: \"kubernetes.io/projected/c2db30ef-0db3-44d4-b276-3b81195d4962-kube-api-access-g6prc\") pod \"horizon-operator-controller-manager-68c6d99b8f-bs99s\" (UID: \"c2db30ef-0db3-44d4-b276-3b81195d4962\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.848353 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89gx5\" (UniqueName: \"kubernetes.io/projected/9f7c1634-c25d-4fc1-92bd-d95ef05c7868-kube-api-access-89gx5\") pod \"designate-operator-controller-manager-78b4bc895b-4cjnp\" (UID: \"9f7c1634-c25d-4fc1-92bd-d95ef05c7868\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.875519 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j442\" (UniqueName: \"kubernetes.io/projected/3c3eb81e-314d-486d-afa4-443f33c54510-kube-api-access-4j442\") pod \"cinder-operator-controller-manager-859b6ccc6-2pxgx\" (UID: \"3c3eb81e-314d-486d-afa4-443f33c54510\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.886000 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54qmz\" (UniqueName: \"kubernetes.io/projected/d75ef15b-d718-436d-b570-21416a0c4021-kube-api-access-54qmz\") pod 
\"glance-operator-controller-manager-77987cd8cd-tklpk\" (UID: \"d75ef15b-d718-436d-b570-21416a0c4021\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.886999 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.887405 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89gx5\" (UniqueName: \"kubernetes.io/projected/9f7c1634-c25d-4fc1-92bd-d95ef05c7868-kube-api-access-89gx5\") pod \"designate-operator-controller-manager-78b4bc895b-4cjnp\" (UID: \"9f7c1634-c25d-4fc1-92bd-d95ef05c7868\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.889574 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.917663 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.918965 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.938433 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-jvrtz" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.942121 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.948047 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9"] Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.949352 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.950869 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-2dklp" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.957354 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvpk2\" (UniqueName: \"kubernetes.io/projected/b8715491-d469-4ade-8434-765685a955db-kube-api-access-lvpk2\") pod \"heat-operator-controller-manager-5f64f6f8bb-5rg69\" (UID: \"b8715491-d469-4ade-8434-765685a955db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.957412 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6prc\" (UniqueName: \"kubernetes.io/projected/c2db30ef-0db3-44d4-b276-3b81195d4962-kube-api-access-g6prc\") pod \"horizon-operator-controller-manager-68c6d99b8f-bs99s\" (UID: \"c2db30ef-0db3-44d4-b276-3b81195d4962\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.957476 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.957517 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spf2n\" (UniqueName: \"kubernetes.io/projected/d70b0d51-8225-4d2b-b128-aeda29446ab9-kube-api-access-spf2n\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:38 crc kubenswrapper[4612]: I1203 07:41:38.966434 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:38.992919 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:38.999061 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.026439 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.027563 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.042353 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.046810 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jqrzx" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.047486 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvpk2\" (UniqueName: \"kubernetes.io/projected/b8715491-d469-4ade-8434-765685a955db-kube-api-access-lvpk2\") pod \"heat-operator-controller-manager-5f64f6f8bb-5rg69\" (UID: \"b8715491-d469-4ade-8434-765685a955db\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.047596 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.050425 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.058492 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8d9z\" (UniqueName: \"kubernetes.io/projected/58adadbb-3706-4f8c-be33-31836f4860e5-kube-api-access-h8d9z\") pod \"keystone-operator-controller-manager-7765d96ddf-flkb9\" (UID: \"58adadbb-3706-4f8c-be33-31836f4860e5\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.058532 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.058562 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns9s8\" (UniqueName: \"kubernetes.io/projected/39305f1e-8b3f-43aa-97d4-48410cc7fe91-kube-api-access-ns9s8\") pod \"ironic-operator-controller-manager-6c548fd776-6mbrd\" (UID: \"39305f1e-8b3f-43aa-97d4-48410cc7fe91\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.058581 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spf2n\" (UniqueName: \"kubernetes.io/projected/d70b0d51-8225-4d2b-b128-aeda29446ab9-kube-api-access-spf2n\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.058920 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.058974 4612 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:39.558959176 +0000 UTC m=+862.732316576 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.068636 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-r8kgb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.079352 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.088127 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.089961 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.105293 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-rsfpp" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.116567 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6prc\" (UniqueName: \"kubernetes.io/projected/c2db30ef-0db3-44d4-b276-3b81195d4962-kube-api-access-g6prc\") pod \"horizon-operator-controller-manager-68c6d99b8f-bs99s\" (UID: \"c2db30ef-0db3-44d4-b276-3b81195d4962\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.120788 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spf2n\" (UniqueName: \"kubernetes.io/projected/d70b0d51-8225-4d2b-b128-aeda29446ab9-kube-api-access-spf2n\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.126636 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.126664 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.127547 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.138331 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.139305 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.159669 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gwh5\" (UniqueName: \"kubernetes.io/projected/e4cadc4e-ebfd-4886-83a2-1caf4aef2b68-kube-api-access-2gwh5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-m898g\" (UID: \"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.159735 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8d9z\" (UniqueName: \"kubernetes.io/projected/58adadbb-3706-4f8c-be33-31836f4860e5-kube-api-access-h8d9z\") pod \"keystone-operator-controller-manager-7765d96ddf-flkb9\" (UID: \"58adadbb-3706-4f8c-be33-31836f4860e5\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.159778 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6tj7\" (UniqueName: \"kubernetes.io/projected/790a3a61-40c9-4360-b7b6-9f08edbec437-kube-api-access-b6tj7\") pod \"manila-operator-controller-manager-7c79b5df47-d98bb\" (UID: \"790a3a61-40c9-4360-b7b6-9f08edbec437\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.159822 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns9s8\" (UniqueName: \"kubernetes.io/projected/39305f1e-8b3f-43aa-97d4-48410cc7fe91-kube-api-access-ns9s8\") pod \"ironic-operator-controller-manager-6c548fd776-6mbrd\" (UID: \"39305f1e-8b3f-43aa-97d4-48410cc7fe91\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.164458 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9lpfc" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.164724 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-xb28n" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.166870 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.184991 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.214746 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8d9z\" (UniqueName: \"kubernetes.io/projected/58adadbb-3706-4f8c-be33-31836f4860e5-kube-api-access-h8d9z\") pod \"keystone-operator-controller-manager-7765d96ddf-flkb9\" (UID: \"58adadbb-3706-4f8c-be33-31836f4860e5\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.214812 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.215762 4612 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.224497 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns9s8\" (UniqueName: \"kubernetes.io/projected/39305f1e-8b3f-43aa-97d4-48410cc7fe91-kube-api-access-ns9s8\") pod \"ironic-operator-controller-manager-6c548fd776-6mbrd\" (UID: \"39305f1e-8b3f-43aa-97d4-48410cc7fe91\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.235844 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.237923 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.241637 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.246049 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.248887 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-5bjpq" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.261043 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pchq\" (UniqueName: \"kubernetes.io/projected/d9951bd4-0756-4c79-96b0-ceaac8a1e51a-kube-api-access-5pchq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-r75nm\" (UID: \"d9951bd4-0756-4c79-96b0-ceaac8a1e51a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.261110 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6tj7\" (UniqueName: \"kubernetes.io/projected/790a3a61-40c9-4360-b7b6-9f08edbec437-kube-api-access-b6tj7\") pod \"manila-operator-controller-manager-7c79b5df47-d98bb\" (UID: \"790a3a61-40c9-4360-b7b6-9f08edbec437\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.261142 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6mcw\" (UniqueName: \"kubernetes.io/projected/24a39a3f-a75f-4029-b861-cf683db5aae2-kube-api-access-v6mcw\") pod \"nova-operator-controller-manager-697bc559fc-4vp27\" (UID: \"24a39a3f-a75f-4029-b861-cf683db5aae2\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.261226 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sptwj\" (UniqueName: \"kubernetes.io/projected/ec8611ec-2e0f-4906-af03-7dc350e7e783-kube-api-access-sptwj\") pod \"octavia-operator-controller-manager-998648c74-b2s4p\" (UID: \"ec8611ec-2e0f-4906-af03-7dc350e7e783\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.261260 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gwh5\" (UniqueName: \"kubernetes.io/projected/e4cadc4e-ebfd-4886-83a2-1caf4aef2b68-kube-api-access-2gwh5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-m898g\" (UID: \"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.275585 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-c258n" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.285534 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.302441 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.318908 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gwh5\" (UniqueName: \"kubernetes.io/projected/e4cadc4e-ebfd-4886-83a2-1caf4aef2b68-kube-api-access-2gwh5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-m898g\" (UID: \"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.330064 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-w62v7"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.331060 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.332322 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.337306 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-2v4rz" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.349653 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6tj7\" (UniqueName: \"kubernetes.io/projected/790a3a61-40c9-4360-b7b6-9f08edbec437-kube-api-access-b6tj7\") pod \"manila-operator-controller-manager-7c79b5df47-d98bb\" (UID: \"790a3a61-40c9-4360-b7b6-9f08edbec437\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.363435 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364286 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6mcw\" (UniqueName: \"kubernetes.io/projected/24a39a3f-a75f-4029-b861-cf683db5aae2-kube-api-access-v6mcw\") pod \"nova-operator-controller-manager-697bc559fc-4vp27\" (UID: \"24a39a3f-a75f-4029-b861-cf683db5aae2\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364312 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxpv4\" (UniqueName: \"kubernetes.io/projected/a9a1ffeb-b3b4-4b07-911b-b829962b6827-kube-api-access-dxpv4\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364370 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364392 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sptwj\" (UniqueName: \"kubernetes.io/projected/ec8611ec-2e0f-4906-af03-7dc350e7e783-kube-api-access-sptwj\") pod \"octavia-operator-controller-manager-998648c74-b2s4p\" (UID: \"ec8611ec-2e0f-4906-af03-7dc350e7e783\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364430 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk9t9\" (UniqueName: \"kubernetes.io/projected/99188e23-da4a-4d43-8778-a2a0b9e962dc-kube-api-access-mk9t9\") pod \"ovn-operator-controller-manager-b6456fdb6-jbfkr\" (UID: \"99188e23-da4a-4d43-8778-a2a0b9e962dc\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.364449 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pchq\" (UniqueName: \"kubernetes.io/projected/d9951bd4-0756-4c79-96b0-ceaac8a1e51a-kube-api-access-5pchq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-r75nm\" (UID: \"d9951bd4-0756-4c79-96b0-ceaac8a1e51a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.379784 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.392348 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.400887 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-w62v7"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.429644 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.430368 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.442647 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.443671 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.444081 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.485830 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-mrlxv" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.486559 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sptwj\" (UniqueName: \"kubernetes.io/projected/ec8611ec-2e0f-4906-af03-7dc350e7e783-kube-api-access-sptwj\") pod \"octavia-operator-controller-manager-998648c74-b2s4p\" (UID: \"ec8611ec-2e0f-4906-af03-7dc350e7e783\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.494570 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6mcw\" (UniqueName: \"kubernetes.io/projected/24a39a3f-a75f-4029-b861-cf683db5aae2-kube-api-access-v6mcw\") pod \"nova-operator-controller-manager-697bc559fc-4vp27\" (UID: \"24a39a3f-a75f-4029-b861-cf683db5aae2\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.507379 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-94jtq" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.508420 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pchq\" (UniqueName: \"kubernetes.io/projected/d9951bd4-0756-4c79-96b0-ceaac8a1e51a-kube-api-access-5pchq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-r75nm\" (UID: \"d9951bd4-0756-4c79-96b0-ceaac8a1e51a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.515823 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.517461 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk9t9\" (UniqueName: \"kubernetes.io/projected/99188e23-da4a-4d43-8778-a2a0b9e962dc-kube-api-access-mk9t9\") pod \"ovn-operator-controller-manager-b6456fdb6-jbfkr\" (UID: \"99188e23-da4a-4d43-8778-a2a0b9e962dc\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.517520 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxpv4\" (UniqueName: \"kubernetes.io/projected/a9a1ffeb-b3b4-4b07-911b-b829962b6827-kube-api-access-dxpv4\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.517584 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m9bb\" (UniqueName: \"kubernetes.io/projected/1ac4eade-01c8-4323-8796-6b2d39a7ee36-kube-api-access-6m9bb\") pod \"placement-operator-controller-manager-78f8948974-w62v7\" (UID: \"1ac4eade-01c8-4323-8796-6b2d39a7ee36\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.517622 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.517753 4612 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.521552 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert podName:a9a1ffeb-b3b4-4b07-911b-b829962b6827 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:40.017786217 +0000 UTC m=+863.191143617 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" (UID: "a9a1ffeb-b3b4-4b07-911b-b829962b6827") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.540043 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-twszp" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.543683 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.556350 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.579684 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.589974 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxpv4\" (UniqueName: \"kubernetes.io/projected/a9a1ffeb-b3b4-4b07-911b-b829962b6827-kube-api-access-dxpv4\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.635702 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.635914 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m9bb\" (UniqueName: \"kubernetes.io/projected/1ac4eade-01c8-4323-8796-6b2d39a7ee36-kube-api-access-6m9bb\") pod \"placement-operator-controller-manager-78f8948974-w62v7\" (UID: \"1ac4eade-01c8-4323-8796-6b2d39a7ee36\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.636003 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.636043 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clhxj\" (UniqueName: \"kubernetes.io/projected/2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2-kube-api-access-clhxj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-jbbw2\" (UID: \"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" Dec 03 07:41:39 crc kubenswrapper[4612]: E1203 07:41:39.636059 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:40.636042991 +0000 UTC m=+863.809400391 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.636122 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g26sh\" (UniqueName: \"kubernetes.io/projected/5a8351bf-c4cf-40fc-8df9-22b3064770a3-kube-api-access-g26sh\") pod \"swift-operator-controller-manager-5f8c65bbfc-kv6n4\" (UID: \"5a8351bf-c4cf-40fc-8df9-22b3064770a3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.744168 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk9t9\" (UniqueName: \"kubernetes.io/projected/99188e23-da4a-4d43-8778-a2a0b9e962dc-kube-api-access-mk9t9\") pod \"ovn-operator-controller-manager-b6456fdb6-jbfkr\" (UID: \"99188e23-da4a-4d43-8778-a2a0b9e962dc\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.757706 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.757783 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clhxj\" (UniqueName: \"kubernetes.io/projected/2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2-kube-api-access-clhxj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-jbbw2\" (UID: \"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.757837 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g26sh\" (UniqueName: \"kubernetes.io/projected/5a8351bf-c4cf-40fc-8df9-22b3064770a3-kube-api-access-g26sh\") pod \"swift-operator-controller-manager-5f8c65bbfc-kv6n4\" (UID: \"5a8351bf-c4cf-40fc-8df9-22b3064770a3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.762057 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.775354 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-snwlr"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.776434 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.782119 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7rmpc" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.790268 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m9bb\" (UniqueName: \"kubernetes.io/projected/1ac4eade-01c8-4323-8796-6b2d39a7ee36-kube-api-access-6m9bb\") pod \"placement-operator-controller-manager-78f8948974-w62v7\" (UID: \"1ac4eade-01c8-4323-8796-6b2d39a7ee36\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.812057 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-snwlr"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.823284 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g26sh\" (UniqueName: \"kubernetes.io/projected/5a8351bf-c4cf-40fc-8df9-22b3064770a3-kube-api-access-g26sh\") pod \"swift-operator-controller-manager-5f8c65bbfc-kv6n4\" (UID: \"5a8351bf-c4cf-40fc-8df9-22b3064770a3\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.830238 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clhxj\" (UniqueName: \"kubernetes.io/projected/2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2-kube-api-access-clhxj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-jbbw2\" (UID: \"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.836300 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.837413 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.850222 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-pszws" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.859233 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.859634 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.960316 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.961180 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sbn2\" (UniqueName: \"kubernetes.io/projected/797a523a-540b-4b10-a294-0543192f0c56-kube-api-access-4sbn2\") pod \"watcher-operator-controller-manager-769dc69bc-495qs\" (UID: \"797a523a-540b-4b10-a294-0543192f0c56\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.961216 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pzw6\" (UniqueName: \"kubernetes.io/projected/b52c7da9-b392-448b-a04a-1afa333df442-kube-api-access-5pzw6\") pod \"test-operator-controller-manager-5854674fcc-snwlr\" (UID: \"b52c7da9-b392-448b-a04a-1afa333df442\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.988737 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw"] Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.989561 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.993816 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.994004 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 07:41:39 crc kubenswrapper[4612]: I1203 07:41:39.994116 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-z7k29" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.006224 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.009839 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.038014 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.038899 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.040996 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-7qrjj" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.067788 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.067843 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sbn2\" (UniqueName: \"kubernetes.io/projected/797a523a-540b-4b10-a294-0543192f0c56-kube-api-access-4sbn2\") pod \"watcher-operator-controller-manager-769dc69bc-495qs\" (UID: \"797a523a-540b-4b10-a294-0543192f0c56\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.067876 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pzw6\" (UniqueName: \"kubernetes.io/projected/b52c7da9-b392-448b-a04a-1afa333df442-kube-api-access-5pzw6\") pod \"test-operator-controller-manager-5854674fcc-snwlr\" (UID: \"b52c7da9-b392-448b-a04a-1afa333df442\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.068248 4612 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.068293 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert podName:a9a1ffeb-b3b4-4b07-911b-b829962b6827 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:41.068279898 +0000 UTC m=+864.241637288 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" (UID: "a9a1ffeb-b3b4-4b07-911b-b829962b6827") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.072033 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.072156 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.077584 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.101155 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pzw6\" (UniqueName: \"kubernetes.io/projected/b52c7da9-b392-448b-a04a-1afa333df442-kube-api-access-5pzw6\") pod \"test-operator-controller-manager-5854674fcc-snwlr\" (UID: \"b52c7da9-b392-448b-a04a-1afa333df442\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.109083 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.122152 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sbn2\" (UniqueName: \"kubernetes.io/projected/797a523a-540b-4b10-a294-0543192f0c56-kube-api-access-4sbn2\") pod \"watcher-operator-controller-manager-769dc69bc-495qs\" (UID: \"797a523a-540b-4b10-a294-0543192f0c56\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.168889 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrcw5\" (UniqueName: \"kubernetes.io/projected/8fd04295-8c24-459f-b2d5-1fee88165e78-kube-api-access-lrcw5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6fzpq\" (UID: \"8fd04295-8c24-459f-b2d5-1fee88165e78\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.168984 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9kzh\" (UniqueName: \"kubernetes.io/projected/f6260167-bcb0-4f9e-8a44-6cd47d248296-kube-api-access-v9kzh\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.169023 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.169051 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.179916 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.201156 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"] Dec 03 07:41:40 crc kubenswrapper[4612]: W1203 07:41:40.248715 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d3cecf1_2f48_4b22_9350_870d25e786ef.slice/crio-42391a1a402f4bfff56450166c0d6cbc4feed4527994b1559d5f7ad034700e06 WatchSource:0}: Error finding container 42391a1a402f4bfff56450166c0d6cbc4feed4527994b1559d5f7ad034700e06: Status 404 returned error can't find the container with id 42391a1a402f4bfff56450166c0d6cbc4feed4527994b1559d5f7ad034700e06 Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.271634 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrcw5\" (UniqueName: \"kubernetes.io/projected/8fd04295-8c24-459f-b2d5-1fee88165e78-kube-api-access-lrcw5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6fzpq\" (UID: \"8fd04295-8c24-459f-b2d5-1fee88165e78\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.271726 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9kzh\" (UniqueName: \"kubernetes.io/projected/f6260167-bcb0-4f9e-8a44-6cd47d248296-kube-api-access-v9kzh\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.271786 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.271817 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.272021 4612 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.272078 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:40.772060228 +0000 UTC m=+863.945417628 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "metrics-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.272373 4612 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.272404 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:40.772394176 +0000 UTC m=+863.945751576 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.312336 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9kzh\" (UniqueName: \"kubernetes.io/projected/f6260167-bcb0-4f9e-8a44-6cd47d248296-kube-api-access-v9kzh\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.322319 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrcw5\" (UniqueName: \"kubernetes.io/projected/8fd04295-8c24-459f-b2d5-1fee88165e78-kube-api-access-lrcw5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6fzpq\" (UID: \"8fd04295-8c24-459f-b2d5-1fee88165e78\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.368367 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.374202 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" event={"ID":"3d3cecf1-2f48-4b22-9350-870d25e786ef","Type":"ContainerStarted","Data":"42391a1a402f4bfff56450166c0d6cbc4feed4527994b1559d5f7ad034700e06"} Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.389184 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" event={"ID":"d75ef15b-d718-436d-b570-21416a0c4021","Type":"ContainerStarted","Data":"3fdf6005833ab2ae9b6d33dbcc383889d7928a3ff24bad87830fa50954bb8c0b"} Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.410328 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.459272 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd"] Dec 03 07:41:40 crc kubenswrapper[4612]: W1203 07:41:40.489315 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c3eb81e_314d_486d_afa4_443f33c54510.slice/crio-171b94fbe0ce37de7f93cda9aefbfd89165db7ef35173c5faa7afd51236c20e9 WatchSource:0}: Error finding container 171b94fbe0ce37de7f93cda9aefbfd89165db7ef35173c5faa7afd51236c20e9: Status 404 returned error can't find the container with id 171b94fbe0ce37de7f93cda9aefbfd89165db7ef35173c5faa7afd51236c20e9 Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.633615 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.682644 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.682914 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.682987 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:42.682970292 +0000 UTC m=+865.856327692 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: W1203 07:41:40.727200 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8715491_d469_4ade_8434_765685a955db.slice/crio-0d38f9c9b08c278f03eb47e66f46de095b880c8a3571fd5859e7fec172db0293 WatchSource:0}: Error finding container 0d38f9c9b08c278f03eb47e66f46de095b880c8a3571fd5859e7fec172db0293: Status 404 returned error can't find the container with id 0d38f9c9b08c278f03eb47e66f46de095b880c8a3571fd5859e7fec172db0293 Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.784034 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.784167 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.784276 4612 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.784320 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:41.784306043 +0000 UTC m=+864.957663443 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "webhook-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.784631 4612 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: E1203 07:41:40.784657 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:41.784648952 +0000 UTC m=+864.958006352 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "metrics-server-cert" not found Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.814833 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.837906 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb"] Dec 03 07:41:40 crc kubenswrapper[4612]: I1203 07:41:40.848984 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s"] Dec 03 07:41:40 crc kubenswrapper[4612]: W1203 07:41:40.852467 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2db30ef_0db3_44d4_b276_3b81195d4962.slice/crio-937034f3d96bc79001adff6d67b8a0ca74c7f482468b15e7c48331f0774ff033 WatchSource:0}: Error finding container 937034f3d96bc79001adff6d67b8a0ca74c7f482468b15e7c48331f0774ff033: Status 404 returned error can't find the container with id 937034f3d96bc79001adff6d67b8a0ca74c7f482468b15e7c48331f0774ff033 Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.130781 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.130975 4612 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.131019 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert podName:a9a1ffeb-b3b4-4b07-911b-b829962b6827 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:43.131006973 +0000 UTC m=+866.304364363 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" (UID: "a9a1ffeb-b3b4-4b07-911b-b829962b6827") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.147196 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p"] Dec 03 07:41:41 crc kubenswrapper[4612]: W1203 07:41:41.154299 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec8611ec_2e0f_4906_af03_7dc350e7e783.slice/crio-19b4a443585e2afeb82f563e72a34aa1bb08a6a31aeeaf50fd8d753a56a838b6 WatchSource:0}: Error finding container 19b4a443585e2afeb82f563e72a34aa1bb08a6a31aeeaf50fd8d753a56a838b6: Status 404 returned error can't find the container with id 19b4a443585e2afeb82f563e72a34aa1bb08a6a31aeeaf50fd8d753a56a838b6 Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.175015 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp"] Dec 03 07:41:41 crc kubenswrapper[4612]: W1203 07:41:41.185666 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a8351bf_c4cf_40fc_8df9_22b3064770a3.slice/crio-3da7b6922c49d6bcc64805b590c5172dc1aaa17a8ba986c02c7e09819d112da0 WatchSource:0}: Error finding container 3da7b6922c49d6bcc64805b590c5172dc1aaa17a8ba986c02c7e09819d112da0: Status 404 returned error can't find the container with id 3da7b6922c49d6bcc64805b590c5172dc1aaa17a8ba986c02c7e09819d112da0 Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.190967 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4"] Dec 03 07:41:41 crc kubenswrapper[4612]: W1203 07:41:41.191695 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4cadc4e_ebfd_4886_83a2_1caf4aef2b68.slice/crio-6de2881b28d0a4ca0134863a551fcb6b1a388f6c9241c36128ff93ba4332429c WatchSource:0}: Error finding container 6de2881b28d0a4ca0134863a551fcb6b1a388f6c9241c36128ff93ba4332429c: Status 404 returned error can't find the container with id 6de2881b28d0a4ca0134863a551fcb6b1a388f6c9241c36128ff93ba4332429c Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.196139 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.203205 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.240045 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.243604 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-snwlr"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.253409 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 
07:41:41.259076 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.263253 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2"] Dec 03 07:41:41 crc kubenswrapper[4612]: W1203 07:41:41.263601 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9951bd4_0756_4c79_96b0_ceaac8a1e51a.slice/crio-6ee2ce7b683fc7c6dcbdc7ba27b23e48a4ee18e26cc1a3a5b5b1ddc32842dc3f WatchSource:0}: Error finding container 6ee2ce7b683fc7c6dcbdc7ba27b23e48a4ee18e26cc1a3a5b5b1ddc32842dc3f: Status 404 returned error can't find the container with id 6ee2ce7b683fc7c6dcbdc7ba27b23e48a4ee18e26cc1a3a5b5b1ddc32842dc3f Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.264628 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4sbn2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-495qs_openstack-operators(797a523a-540b-4b10-a294-0543192f0c56): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.265970 4612 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5pchq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-r75nm_openstack-operators(d9951bd4-0756-4c79-96b0-ceaac8a1e51a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.266901 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4sbn2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-495qs_openstack-operators(797a523a-540b-4b10-a294-0543192f0c56): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.268023 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" podUID="797a523a-540b-4b10-a294-0543192f0c56" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.274680 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5pchq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-r75nm_openstack-operators(d9951bd4-0756-4c79-96b0-ceaac8a1e51a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.275872 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-clhxj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-jbbw2_openstack-operators(2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.276015 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" podUID="d9951bd4-0756-4c79-96b0-ceaac8a1e51a" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.279265 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-clhxj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-jbbw2_openstack-operators(2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.280711 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podUID="2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.376940 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-w62v7"] Dec 03 07:41:41 crc kubenswrapper[4612]: W1203 07:41:41.381277 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ac4eade_01c8_4323_8796_6b2d39a7ee36.slice/crio-23c6532de992e37aaa4c05c117688b4f949bf5f924f787a6c6f389b84e1b71c8 WatchSource:0}: Error finding container 23c6532de992e37aaa4c05c117688b4f949bf5f924f787a6c6f389b84e1b71c8: Status 404 returned error can't find the container with id 23c6532de992e37aaa4c05c117688b4f949bf5f924f787a6c6f389b84e1b71c8 Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.394875 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" event={"ID":"24a39a3f-a75f-4029-b861-cf683db5aae2","Type":"ContainerStarted","Data":"e04072291e08db70b382a03e1fbd55c9701431a7f8104f1e7492b2f7a3fc5033"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.396345 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq"] Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.396974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" event={"ID":"9f7c1634-c25d-4fc1-92bd-d95ef05c7868","Type":"ContainerStarted","Data":"b546916766dd30fc2b6828301f9ec8b8a85d9ab7ec374ffcc0b88578e90a4614"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.398165 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" event={"ID":"d9951bd4-0756-4c79-96b0-ceaac8a1e51a","Type":"ContainerStarted","Data":"6ee2ce7b683fc7c6dcbdc7ba27b23e48a4ee18e26cc1a3a5b5b1ddc32842dc3f"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.404222 4612 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" event={"ID":"3c3eb81e-314d-486d-afa4-443f33c54510","Type":"ContainerStarted","Data":"171b94fbe0ce37de7f93cda9aefbfd89165db7ef35173c5faa7afd51236c20e9"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.404496 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" podUID="d9951bd4-0756-4c79-96b0-ceaac8a1e51a" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.413828 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" event={"ID":"58adadbb-3706-4f8c-be33-31836f4860e5","Type":"ContainerStarted","Data":"a6023ce24e939ba5fb7a4692dc80c1de2fb04c71dd97e4d181080a245436203e"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.419134 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" event={"ID":"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68","Type":"ContainerStarted","Data":"6de2881b28d0a4ca0134863a551fcb6b1a388f6c9241c36128ff93ba4332429c"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.420021 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" event={"ID":"c2db30ef-0db3-44d4-b276-3b81195d4962","Type":"ContainerStarted","Data":"937034f3d96bc79001adff6d67b8a0ca74c7f482468b15e7c48331f0774ff033"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.424085 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" event={"ID":"b8715491-d469-4ade-8434-765685a955db","Type":"ContainerStarted","Data":"0d38f9c9b08c278f03eb47e66f46de095b880c8a3571fd5859e7fec172db0293"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.428517 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" event={"ID":"5a8351bf-c4cf-40fc-8df9-22b3064770a3","Type":"ContainerStarted","Data":"3da7b6922c49d6bcc64805b590c5172dc1aaa17a8ba986c02c7e09819d112da0"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.430120 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" event={"ID":"797a523a-540b-4b10-a294-0543192f0c56","Type":"ContainerStarted","Data":"717f8aa786f1e3c1ee0d36ac55480475c7d56006574480a02ef99c9afa053add"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.434644 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" podUID="797a523a-540b-4b10-a294-0543192f0c56" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.434766 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" event={"ID":"39305f1e-8b3f-43aa-97d4-48410cc7fe91","Type":"ContainerStarted","Data":"0ccb5d8944525b6522bfbbb94cac30d7735b0e402bc197a26ae0206df433642c"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.437161 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6m9bb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-w62v7_openstack-operators(1ac4eade-01c8-4323-8796-6b2d39a7ee36): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.438330 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" event={"ID":"99188e23-da4a-4d43-8778-a2a0b9e962dc","Type":"ContainerStarted","Data":"b1b380f9d74e555787093ecc60da08c77f4762092a89aac68d49c91267ac40dc"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.445462 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6m9bb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-w62v7_openstack-operators(1ac4eade-01c8-4323-8796-6b2d39a7ee36): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.445648 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" event={"ID":"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2","Type":"ContainerStarted","Data":"2cf8196596eae60af0d8924b9d2fdf86a415ee603cad8b533bc8c1d4fb97ec54"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.446596 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podUID="1ac4eade-01c8-4323-8796-6b2d39a7ee36" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.449529 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" event={"ID":"b52c7da9-b392-448b-a04a-1afa333df442","Type":"ContainerStarted","Data":"c30aaddcec4469f287cbcd9f026b665b5e849a80f502255b1a434027c1c62892"} Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.449525 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podUID="2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.451029 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" event={"ID":"ec8611ec-2e0f-4906-af03-7dc350e7e783","Type":"ContainerStarted","Data":"19b4a443585e2afeb82f563e72a34aa1bb08a6a31aeeaf50fd8d753a56a838b6"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.452096 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" event={"ID":"790a3a61-40c9-4360-b7b6-9f08edbec437","Type":"ContainerStarted","Data":"459210d0cc4e0431cf89d7d87814c871904f37cd73b8cb385d3a239cb1ad480c"} Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.844018 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:41 crc kubenswrapper[4612]: I1203 07:41:41.844175 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.844313 4612 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.844382 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:43.844362272 +0000 UTC m=+867.017719672 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "metrics-server-cert" not found Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.844313 4612 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 07:41:41 crc kubenswrapper[4612]: E1203 07:41:41.844460 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:43.844449304 +0000 UTC m=+867.017806704 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "webhook-server-cert" not found Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.443140 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.443202 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.460447 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" event={"ID":"8fd04295-8c24-459f-b2d5-1fee88165e78","Type":"ContainerStarted","Data":"7776df0e355117d541080cf3204049b7a5590aac0364c21e05cf320dc58dcc6c"} Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.465820 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" event={"ID":"1ac4eade-01c8-4323-8796-6b2d39a7ee36","Type":"ContainerStarted","Data":"23c6532de992e37aaa4c05c117688b4f949bf5f924f787a6c6f389b84e1b71c8"} Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.469170 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podUID="1ac4eade-01c8-4323-8796-6b2d39a7ee36" Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.469594 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podUID="2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2" Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.471593 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" podUID="d9951bd4-0756-4c79-96b0-ceaac8a1e51a" Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.472633 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" podUID="797a523a-540b-4b10-a294-0543192f0c56" Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.557511 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.785116 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.785275 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:42 crc kubenswrapper[4612]: E1203 07:41:42.785323 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:46.785308294 +0000 UTC m=+869.958665694 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.803214 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:42 crc kubenswrapper[4612]: I1203 07:41:42.899130 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:43 crc kubenswrapper[4612]: I1203 07:41:43.191844 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.192688 4612 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.192884 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert podName:a9a1ffeb-b3b4-4b07-911b-b829962b6827 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:47.192866925 +0000 UTC m=+870.366224325 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" (UID: "a9a1ffeb-b3b4-4b07-911b-b829962b6827") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.477121 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podUID="1ac4eade-01c8-4323-8796-6b2d39a7ee36" Dec 03 07:41:43 crc kubenswrapper[4612]: I1203 07:41:43.914211 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:43 crc kubenswrapper[4612]: I1203 07:41:43.914274 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.914409 4612 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.914443 4612 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.914481 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:47.914464859 +0000 UTC m=+871.087822259 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "webhook-server-cert" not found Dec 03 07:41:43 crc kubenswrapper[4612]: E1203 07:41:43.914499 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:47.91449243 +0000 UTC m=+871.087849830 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "metrics-server-cert" not found Dec 03 07:41:44 crc kubenswrapper[4612]: I1203 07:41:44.496164 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c9h9z" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="registry-server" containerID="cri-o://1f0f516d72bbda99e8b05b9e514dafa46ebe8cd3264e9fce750b1c4fe4fea0b5" gracePeriod=2 Dec 03 07:41:45 crc kubenswrapper[4612]: I1203 07:41:45.546131 4612 generic.go:334] "Generic (PLEG): container finished" podID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerID="1f0f516d72bbda99e8b05b9e514dafa46ebe8cd3264e9fce750b1c4fe4fea0b5" exitCode=0 Dec 03 07:41:45 crc kubenswrapper[4612]: I1203 07:41:45.546236 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerDied","Data":"1f0f516d72bbda99e8b05b9e514dafa46ebe8cd3264e9fce750b1c4fe4fea0b5"} Dec 03 07:41:46 crc kubenswrapper[4612]: I1203 07:41:46.870380 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:46 crc kubenswrapper[4612]: E1203 07:41:46.870663 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:46 crc kubenswrapper[4612]: E1203 07:41:46.870770 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:54.870742832 +0000 UTC m=+878.044100282 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: I1203 07:41:47.275461 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.276297 4612 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.276450 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert podName:a9a1ffeb-b3b4-4b07-911b-b829962b6827 nodeName:}" failed. 
No retries permitted until 2025-12-03 07:41:55.276420025 +0000 UTC m=+878.449777425 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" (UID: "a9a1ffeb-b3b4-4b07-911b-b829962b6827") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: I1203 07:41:47.984480 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:47 crc kubenswrapper[4612]: I1203 07:41:47.984546 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.984756 4612 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.984811 4612 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.984821 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:55.984800219 +0000 UTC m=+879.158157619 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "metrics-server-cert" not found Dec 03 07:41:47 crc kubenswrapper[4612]: E1203 07:41:47.984937 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs podName:f6260167-bcb0-4f9e-8a44-6cd47d248296 nodeName:}" failed. No retries permitted until 2025-12-03 07:41:55.984910652 +0000 UTC m=+879.158268052 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs") pod "openstack-operator-controller-manager-64f7f94cfd-bdkfw" (UID: "f6260167-bcb0-4f9e-8a44-6cd47d248296") : secret "webhook-server-cert" not found Dec 03 07:41:49 crc kubenswrapper[4612]: I1203 07:41:49.960702 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.116349 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities\") pod \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.116415 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb75f\" (UniqueName: \"kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f\") pod \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.116434 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content\") pod \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\" (UID: \"efd99d0c-82cb-4075-b1f5-4c0c9d03b721\") " Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.117531 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities" (OuterVolumeSpecName: "utilities") pod "efd99d0c-82cb-4075-b1f5-4c0c9d03b721" (UID: "efd99d0c-82cb-4075-b1f5-4c0c9d03b721"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.127111 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f" (OuterVolumeSpecName: "kube-api-access-nb75f") pod "efd99d0c-82cb-4075-b1f5-4c0c9d03b721" (UID: "efd99d0c-82cb-4075-b1f5-4c0c9d03b721"). InnerVolumeSpecName "kube-api-access-nb75f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.177344 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efd99d0c-82cb-4075-b1f5-4c0c9d03b721" (UID: "efd99d0c-82cb-4075-b1f5-4c0c9d03b721"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.218443 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.218475 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.218485 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb75f\" (UniqueName: \"kubernetes.io/projected/efd99d0c-82cb-4075-b1f5-4c0c9d03b721-kube-api-access-nb75f\") on node \"crc\" DevicePath \"\"" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.594762 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9h9z" event={"ID":"efd99d0c-82cb-4075-b1f5-4c0c9d03b721","Type":"ContainerDied","Data":"922da0a1f1ca333d5924ef7b7b5a7468a1761e458e3cfb07e9a321545f34e76a"} Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.595129 4612 scope.go:117] "RemoveContainer" containerID="1f0f516d72bbda99e8b05b9e514dafa46ebe8cd3264e9fce750b1c4fe4fea0b5" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.595031 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9h9z" Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.635677 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:50 crc kubenswrapper[4612]: I1203 07:41:50.640666 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c9h9z"] Dec 03 07:41:51 crc kubenswrapper[4612]: I1203 07:41:51.097042 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" path="/var/lib/kubelet/pods/efd99d0c-82cb-4075-b1f5-4c0c9d03b721/volumes" Dec 03 07:41:54 crc kubenswrapper[4612]: I1203 07:41:54.909202 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:41:54 crc kubenswrapper[4612]: E1203 07:41:54.909403 4612 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:54 crc kubenswrapper[4612]: E1203 07:41:54.910362 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert podName:d70b0d51-8225-4d2b-b128-aeda29446ab9 nodeName:}" failed. No retries permitted until 2025-12-03 07:42:10.910341268 +0000 UTC m=+894.083698678 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert") pod "infra-operator-controller-manager-57548d458d-rdf2f" (UID: "d70b0d51-8225-4d2b-b128-aeda29446ab9") : secret "infra-operator-webhook-server-cert" not found Dec 03 07:41:55 crc kubenswrapper[4612]: I1203 07:41:55.316297 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:55 crc kubenswrapper[4612]: I1203 07:41:55.320757 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a9a1ffeb-b3b4-4b07-911b-b829962b6827-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9\" (UID: \"a9a1ffeb-b3b4-4b07-911b-b829962b6827\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:55 crc kubenswrapper[4612]: I1203 07:41:55.436984 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" Dec 03 07:41:56 crc kubenswrapper[4612]: I1203 07:41:56.026293 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:56 crc kubenswrapper[4612]: I1203 07:41:56.026659 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:56 crc kubenswrapper[4612]: I1203 07:41:56.043581 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-metrics-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:56 crc kubenswrapper[4612]: I1203 07:41:56.047611 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f6260167-bcb0-4f9e-8a44-6cd47d248296-webhook-certs\") pod \"openstack-operator-controller-manager-64f7f94cfd-bdkfw\" (UID: \"f6260167-bcb0-4f9e-8a44-6cd47d248296\") " pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:56 crc kubenswrapper[4612]: I1203 07:41:56.217500 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:41:56 crc kubenswrapper[4612]: E1203 07:41:56.611356 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 03 07:41:56 crc kubenswrapper[4612]: E1203 07:41:56.611505 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2gwh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-m898g_openstack-operators(e4cadc4e-ebfd-4886-83a2-1caf4aef2b68): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:41:59 crc kubenswrapper[4612]: E1203 07:41:59.029438 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 03 07:41:59 crc kubenswrapper[4612]: E1203 07:41:59.029977 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ns9s8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-6mbrd_openstack-operators(39305f1e-8b3f-43aa-97d4-48410cc7fe91): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:41:59 crc kubenswrapper[4612]: E1203 07:41:59.617105 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 03 07:41:59 crc kubenswrapper[4612]: E1203 07:41:59.617607 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 
500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lvpk2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-5rg69_openstack-operators(b8715491-d469-4ade-8434-765685a955db): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.784132 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"] Dec 03 07:42:00 crc kubenswrapper[4612]: E1203 07:42:00.784472 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="extract-content" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.784487 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="extract-content" Dec 03 07:42:00 crc kubenswrapper[4612]: E1203 07:42:00.784496 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="registry-server" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.784503 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="registry-server" Dec 03 07:42:00 crc kubenswrapper[4612]: E1203 07:42:00.784526 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="extract-utilities" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.784534 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="extract-utilities" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.784679 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="efd99d0c-82cb-4075-b1f5-4c0c9d03b721" containerName="registry-server" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.785870 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.799845 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.799905 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp6vs\" (UniqueName: \"kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.800051 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.865166 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"] Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.903976 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp6vs\" (UniqueName: \"kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.904316 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.904380 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.904808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.905264 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:00 crc kubenswrapper[4612]: I1203 07:42:00.927725 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rp6vs\" (UniqueName: \"kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs\") pod \"redhat-marketplace-jpt4j\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") " pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:01 crc kubenswrapper[4612]: I1203 07:42:01.101278 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpt4j" Dec 03 07:42:01 crc kubenswrapper[4612]: E1203 07:42:01.430522 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 03 07:42:01 crc kubenswrapper[4612]: E1203 07:42:01.431007 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5pzw6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-snwlr_openstack-operators(b52c7da9-b392-448b-a04a-1afa333df442): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:02 crc kubenswrapper[4612]: E1203 07:42:02.052323 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: 
context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d" Dec 03 07:42:02 crc kubenswrapper[4612]: E1203 07:42:02.052482 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g26sh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-kv6n4_openstack-operators(5a8351bf-c4cf-40fc-8df9-22b3064770a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:03 crc kubenswrapper[4612]: E1203 07:42:03.959129 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 03 07:42:03 crc kubenswrapper[4612]: E1203 07:42:03.960520 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4j442,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-2pxgx_openstack-operators(3c3eb81e-314d-486d-afa4-443f33c54510): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:04 crc kubenswrapper[4612]: E1203 07:42:04.876699 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 03 07:42:04 crc kubenswrapper[4612]: E1203 07:42:04.876907 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sptwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-b2s4p_openstack-operators(ec8611ec-2e0f-4906-af03-7dc350e7e783): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:06 crc kubenswrapper[4612]: E1203 07:42:06.658809 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 03 07:42:06 crc kubenswrapper[4612]: E1203 07:42:06.659041 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f9g4h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-n2rzg_openstack-operators(3d3cecf1-2f48-4b22-9350-870d25e786ef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:07 crc kubenswrapper[4612]: E1203 07:42:07.168306 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 03 07:42:07 crc kubenswrapper[4612]: E1203 07:42:07.168495 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g6prc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-bs99s_openstack-operators(c2db30ef-0db3-44d4-b276-3b81195d4962): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:07 crc kubenswrapper[4612]: E1203 07:42:07.808354 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 03 07:42:07 crc kubenswrapper[4612]: E1203 07:42:07.808697 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b6tj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-d98bb_openstack-operators(790a3a61-40c9-4360-b7b6-9f08edbec437): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:08 crc kubenswrapper[4612]: E1203 07:42:08.370072 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 03 07:42:08 crc kubenswrapper[4612]: E1203 07:42:08.370258 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h8d9z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-flkb9_openstack-operators(58adadbb-3706-4f8c-be33-31836f4860e5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:09 crc kubenswrapper[4612]: E1203 07:42:09.044174 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 03 07:42:09 crc kubenswrapper[4612]: E1203 07:42:09.044587 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v6mcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-4vp27_openstack-operators(24a39a3f-a75f-4029-b861-cf683db5aae2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:10 crc kubenswrapper[4612]: I1203 07:42:10.964892 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:42:10 crc kubenswrapper[4612]: I1203 07:42:10.974269 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d70b0d51-8225-4d2b-b128-aeda29446ab9-cert\") pod \"infra-operator-controller-manager-57548d458d-rdf2f\" (UID: \"d70b0d51-8225-4d2b-b128-aeda29446ab9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:42:11 crc kubenswrapper[4612]: I1203 07:42:11.089119 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" Dec 03 07:42:15 crc kubenswrapper[4612]: E1203 07:42:15.120462 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 03 07:42:15 crc kubenswrapper[4612]: E1203 07:42:15.121169 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-clhxj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-jbbw2_openstack-operators(2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:15 crc kubenswrapper[4612]: E1203 07:42:15.742551 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 03 07:42:15 crc kubenswrapper[4612]: E1203 07:42:15.743127 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6m9bb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-w62v7_openstack-operators(1ac4eade-01c8-4323-8796-6b2d39a7ee36): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:42:15 crc kubenswrapper[4612]: I1203 07:42:15.754707 4612 scope.go:117] "RemoveContainer" containerID="47d952ee0092cdde4f1c79153914b06161937347a407d3c75bbe9bdc22c5f7fa" Dec 03 07:42:16 crc kubenswrapper[4612]: I1203 07:42:16.040157 4612 scope.go:117] "RemoveContainer" containerID="77a0c9ccda3b8163e3781e9484b6a39debcd141c3e6ab802b9d3c33e8548afc7" Dec 03 07:42:16 crc kubenswrapper[4612]: I1203 07:42:16.351742 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"] Dec 03 07:42:16 crc kubenswrapper[4612]: I1203 07:42:16.359080 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw"] Dec 03 07:42:16 crc kubenswrapper[4612]: I1203 07:42:16.419106 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9"] Dec 03 07:42:16 crc kubenswrapper[4612]: I1203 07:42:16.570115 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f"] Dec 03 07:42:16 crc kubenswrapper[4612]: W1203 07:42:16.846779 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9a1ffeb_b3b4_4b07_911b_b829962b6827.slice/crio-7c43f9151604be22bf8df6aed4dab1ebc8d6e5a7df8b735d9519984561710a0e WatchSource:0}: Error finding container 7c43f9151604be22bf8df6aed4dab1ebc8d6e5a7df8b735d9519984561710a0e: Status 404 returned error can't find the container with id 7c43f9151604be22bf8df6aed4dab1ebc8d6e5a7df8b735d9519984561710a0e Dec 03 07:42:16 crc kubenswrapper[4612]: W1203 07:42:16.848025 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd70b0d51_8225_4d2b_b128_aeda29446ab9.slice/crio-4193dd6a5cba43e7d8de895f88c4c56af0accdb74e8bd9bf2bcf4bab667ba595 WatchSource:0}: Error finding container 4193dd6a5cba43e7d8de895f88c4c56af0accdb74e8bd9bf2bcf4bab667ba595: Status 404 returned error can't find the container with id 4193dd6a5cba43e7d8de895f88c4c56af0accdb74e8bd9bf2bcf4bab667ba595 Dec 03 07:42:16 crc kubenswrapper[4612]: W1203 07:42:16.849795 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6260167_bcb0_4f9e_8a44_6cd47d248296.slice/crio-fd7d1bde2bb5f4de24320a098564ceb4c469b524cbbc02b2152303f7b3a294ee WatchSource:0}: Error finding container fd7d1bde2bb5f4de24320a098564ceb4c469b524cbbc02b2152303f7b3a294ee: Status 404 returned error can't find the container with id fd7d1bde2bb5f4de24320a098564ceb4c469b524cbbc02b2152303f7b3a294ee Dec 03 07:42:16 crc kubenswrapper[4612]: W1203 07:42:16.853625 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fcfa4b1_43d4_4c2c_a38d_5516a6383a8b.slice/crio-dc4f9f41ecdf8e952c1054e3b0ab9bacdc7389f00a85668ae2cada329286c900 WatchSource:0}: Error finding container dc4f9f41ecdf8e952c1054e3b0ab9bacdc7389f00a85668ae2cada329286c900: Status 404 returned error can't find the container with id dc4f9f41ecdf8e952c1054e3b0ab9bacdc7389f00a85668ae2cada329286c900 Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.135763 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.135816 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.735928 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tm992"] Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.785158 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.786359 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tm992"] Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.821165 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" event={"ID":"a9a1ffeb-b3b4-4b07-911b-b829962b6827","Type":"ContainerStarted","Data":"7c43f9151604be22bf8df6aed4dab1ebc8d6e5a7df8b735d9519984561710a0e"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.857488 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" event={"ID":"f6260167-bcb0-4f9e-8a44-6cd47d248296","Type":"ContainerStarted","Data":"d338a2eb54e43f96ea909d2e75bb312e2bc471e19a8df2eb14174f1821d2640c"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.857547 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" event={"ID":"f6260167-bcb0-4f9e-8a44-6cd47d248296","Type":"ContainerStarted","Data":"fd7d1bde2bb5f4de24320a098564ceb4c469b524cbbc02b2152303f7b3a294ee"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.889151 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" event={"ID":"9f7c1634-c25d-4fc1-92bd-d95ef05c7868","Type":"ContainerStarted","Data":"800703baa1f4db785f358720065b6eeb461d7d3bab9781ab26c0bd4f56f78a66"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.891166 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" event={"ID":"d75ef15b-d718-436d-b570-21416a0c4021","Type":"ContainerStarted","Data":"c59f06cf8262931fdc604617f91162cb404a7e4d44e47dfa3af6ac8ebc1fe9e1"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.897680 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrkj7\" (UniqueName: \"kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.898208 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.898483 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.914793 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" 
event={"ID":"d70b0d51-8225-4d2b-b128-aeda29446ab9","Type":"ContainerStarted","Data":"4193dd6a5cba43e7d8de895f88c4c56af0accdb74e8bd9bf2bcf4bab667ba595"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.920574 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" event={"ID":"797a523a-540b-4b10-a294-0543192f0c56","Type":"ContainerStarted","Data":"a301244ebf118bf691a45ef99f8f03eedb1434264a51786775619e8cdad3f7de"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.923689 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerStarted","Data":"dc4f9f41ecdf8e952c1054e3b0ab9bacdc7389f00a85668ae2cada329286c900"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.926997 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" event={"ID":"99188e23-da4a-4d43-8778-a2a0b9e962dc","Type":"ContainerStarted","Data":"64a67de267382bd8fea68d9cdb1bb7da7c9d1dc0b3331555da21b36c9d5a3d47"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.930642 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" event={"ID":"8fd04295-8c24-459f-b2d5-1fee88165e78","Type":"ContainerStarted","Data":"fba6f2e85be3d1202edb054260ed45d9192e26514a758381934759299b0afecb"} Dec 03 07:42:17 crc kubenswrapper[4612]: I1203 07:42:17.957561 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6fzpq" podStartSLOduration=10.319879337 podStartE2EDuration="37.957544788s" podCreationTimestamp="2025-12-03 07:41:40 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.412836393 +0000 UTC m=+864.586193793" lastFinishedPulling="2025-12-03 07:42:09.050501844 +0000 UTC m=+892.223859244" observedRunningTime="2025-12-03 07:42:17.953748393 +0000 UTC m=+901.127105793" watchObservedRunningTime="2025-12-03 07:42:17.957544788 +0000 UTC m=+901.130902188" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.013652 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrkj7\" (UniqueName: \"kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.014084 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.014215 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.014639 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.015612 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.049822 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrkj7\" (UniqueName: \"kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7\") pod \"redhat-operators-tm992\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") " pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.133610 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tm992" Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.945689 4612 generic.go:334] "Generic (PLEG): container finished" podID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerID="4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669" exitCode=0 Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.945873 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerDied","Data":"4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669"} Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.948564 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" event={"ID":"d9951bd4-0756-4c79-96b0-ceaac8a1e51a","Type":"ContainerStarted","Data":"f206789fa9884768c1f021ca69dcaf006a14cfb63abbcf30be9623a3990fca19"} Dec 03 07:42:18 crc kubenswrapper[4612]: I1203 07:42:18.948600 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" Dec 03 07:42:19 crc kubenswrapper[4612]: I1203 07:42:19.014712 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw" podStartSLOduration=40.014689043 podStartE2EDuration="40.014689043s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:42:19.004274373 +0000 UTC m=+902.177631773" watchObservedRunningTime="2025-12-03 07:42:19.014689043 +0000 UTC m=+902.188046463" Dec 03 07:42:21 crc kubenswrapper[4612]: E1203 07:42:21.106244 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" podUID="ec8611ec-2e0f-4906-af03-7dc350e7e783" Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.330355 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tm992"] Dec 03 07:42:21 crc kubenswrapper[4612]: E1203 07:42:21.833404 4612 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" podUID="790a3a61-40c9-4360-b7b6-9f08edbec437" Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.985689 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" event={"ID":"ec8611ec-2e0f-4906-af03-7dc350e7e783","Type":"ContainerStarted","Data":"58f5f590c517acdcaf0ac2b916b241e8278d6912776cbc64581a3087a2ae5139"} Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.987523 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" event={"ID":"790a3a61-40c9-4360-b7b6-9f08edbec437","Type":"ContainerStarted","Data":"c7ca09ae84ee8f227d2c736c3674dbc44b543db4f24569399e162b6bc74b109c"} Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.989258 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerStarted","Data":"5a57bb7707164313393cb7c78a2425e54f03a60a0411a0bbb29b0c120accdd6f"} Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.995025 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" event={"ID":"9f7c1634-c25d-4fc1-92bd-d95ef05c7868","Type":"ContainerStarted","Data":"c5475dae7796833f50743673e7433486993c020dfae39d2a3f521626f0c5493d"} Dec 03 07:42:21 crc kubenswrapper[4612]: I1203 07:42:21.995073 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:42:22 crc kubenswrapper[4612]: I1203 07:42:22.053417 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" podStartSLOduration=4.414945633 podStartE2EDuration="44.053399234s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.194800607 +0000 UTC m=+864.368158007" lastFinishedPulling="2025-12-03 07:42:20.833254208 +0000 UTC m=+904.006611608" observedRunningTime="2025-12-03 07:42:22.049471636 +0000 UTC m=+905.222829036" watchObservedRunningTime="2025-12-03 07:42:22.053399234 +0000 UTC m=+905.226756634" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.002272 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4cjnp" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.360158 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6hrsn"] Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.361536 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.387251 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hrsn"] Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.511596 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.511676 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.511821 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6kc5\" (UniqueName: \"kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.612706 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6kc5\" (UniqueName: \"kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.612807 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.612869 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.613402 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.614032 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.641177 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x6kc5\" (UniqueName: \"kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5\") pod \"community-operators-6hrsn\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") " pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:23 crc kubenswrapper[4612]: I1203 07:42:23.676681 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6hrsn" Dec 03 07:42:24 crc kubenswrapper[4612]: E1203 07:42:24.895659 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" podUID="3c3eb81e-314d-486d-afa4-443f33c54510" Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.021435 4612 generic.go:334] "Generic (PLEG): container finished" podID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerID="d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32" exitCode=0 Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.021693 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerDied","Data":"d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32"} Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.030204 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" event={"ID":"3c3eb81e-314d-486d-afa4-443f33c54510","Type":"ContainerStarted","Data":"4e4a4f8f8da493655b92b7b5446c92de43bf058c2a2fbf6cd22338d257eb005c"} Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.032697 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" event={"ID":"797a523a-540b-4b10-a294-0543192f0c56","Type":"ContainerStarted","Data":"9f22f0ef7c85d7848058adb61fb68d8109b82c5a49d97349b73ed821d0b9498b"} Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.034191 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.035638 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.053188 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hrsn"] Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.060565 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" event={"ID":"ec8611ec-2e0f-4906-af03-7dc350e7e783","Type":"ContainerStarted","Data":"5e1883b2cbba0fe14c6aa8c392a24ab49bfbcdde66bb4d7819ca43826d91a406"} Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.061177 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.095845 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-495qs" 
podStartSLOduration=2.9159093499999997 podStartE2EDuration="46.0958297s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.264495768 +0000 UTC m=+864.437853168" lastFinishedPulling="2025-12-03 07:42:24.444416108 +0000 UTC m=+907.617773518" observedRunningTime="2025-12-03 07:42:25.068682952 +0000 UTC m=+908.242040352" watchObservedRunningTime="2025-12-03 07:42:25.0958297 +0000 UTC m=+908.269187100" Dec 03 07:42:25 crc kubenswrapper[4612]: I1203 07:42:25.120752 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p" podStartSLOduration=3.6983118839999998 podStartE2EDuration="47.120730752s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.158847149 +0000 UTC m=+864.332204549" lastFinishedPulling="2025-12-03 07:42:24.581266017 +0000 UTC m=+907.754623417" observedRunningTime="2025-12-03 07:42:25.11666828 +0000 UTC m=+908.290025680" watchObservedRunningTime="2025-12-03 07:42:25.120730752 +0000 UTC m=+908.294088162" Dec 03 07:42:25 crc kubenswrapper[4612]: W1203 07:42:25.127449 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5fbdf1d_c06a_4145_8582_ff280680b2d5.slice/crio-7f41a05da301918f245ea9750d9551c3a542f81a95f8893d5ac635c79df12871 WatchSource:0}: Error finding container 7f41a05da301918f245ea9750d9551c3a542f81a95f8893d5ac635c79df12871: Status 404 returned error can't find the container with id 7f41a05da301918f245ea9750d9551c3a542f81a95f8893d5ac635c79df12871 Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.465547 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podUID="2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.576832 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" podUID="24a39a3f-a75f-4029-b861-cf683db5aae2" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.704234 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" podUID="39305f1e-8b3f-43aa-97d4-48410cc7fe91" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.785075 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" podUID="b8715491-d469-4ade-8434-765685a955db" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.800093 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" 
podUID="58adadbb-3706-4f8c-be33-31836f4860e5" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.817035 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" podUID="5a8351bf-c4cf-40fc-8df9-22b3064770a3" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.906273 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" podUID="e4cadc4e-ebfd-4886-83a2-1caf4aef2b68" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.920858 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" podUID="c2db30ef-0db3-44d4-b276-3b81195d4962" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.931643 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podUID="1ac4eade-01c8-4323-8796-6b2d39a7ee36" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.935274 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" podUID="b52c7da9-b392-448b-a04a-1afa333df442" Dec 03 07:42:25 crc kubenswrapper[4612]: E1203 07:42:25.961509 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" podUID="3d3cecf1-2f48-4b22-9350-870d25e786ef" Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.068784 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" event={"ID":"1ac4eade-01c8-4323-8796-6b2d39a7ee36","Type":"ContainerStarted","Data":"9a63dc85a766875f0f542a981938d1e6d4f3d9bf3e7e13efa7dfbbed60d97df9"} Dec 03 07:42:26 crc kubenswrapper[4612]: E1203 07:42:26.070369 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podUID="1ac4eade-01c8-4323-8796-6b2d39a7ee36" Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.072847 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" event={"ID":"d9951bd4-0756-4c79-96b0-ceaac8a1e51a","Type":"ContainerStarted","Data":"3532acd20d53d370df748b8928c0cb4422c1d3d76a90cac955f4fcd7978dbef0"} Dec 03 07:42:26 crc 
kubenswrapper[4612]: I1203 07:42:26.073056 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.074661 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.075364 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" event={"ID":"5a8351bf-c4cf-40fc-8df9-22b3064770a3","Type":"ContainerStarted","Data":"2a1d39cc71affaaedbd042f7b87ce96772caa2b3f1d73cdd5245d5e43a45adeb"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.077534 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" event={"ID":"24a39a3f-a75f-4029-b861-cf683db5aae2","Type":"ContainerStarted","Data":"c78c27754c8ccfddd0933caf43fa846bbef09da5c1e8e60cddae96aa213f518f"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.082291 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" event={"ID":"b52c7da9-b392-448b-a04a-1afa333df442","Type":"ContainerStarted","Data":"7d25f751d5b31eda9ea9e498b39d6c7b1c150f2a15c2c416c9f10a026273f973"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.103158 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" event={"ID":"d70b0d51-8225-4d2b-b128-aeda29446ab9","Type":"ContainerStarted","Data":"4ad6e59dbee7a875c259842a76fa1a46924a594a5e9631811787da3f99f24beb"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.121717 4612 generic.go:334] "Generic (PLEG): container finished" podID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerID="5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e" exitCode=0 Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.121781 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerDied","Data":"5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.132514 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" event={"ID":"39305f1e-8b3f-43aa-97d4-48410cc7fe91","Type":"ContainerStarted","Data":"4cc47e0153d42d927351db981a14e142ef96e99c7cf414e336ef5aec7f653bef"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.142491 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" event={"ID":"b8715491-d469-4ade-8434-765685a955db","Type":"ContainerStarted","Data":"0f406823168bab2f3dacde0aefa023c44c9b3631194b09f379908783b236f35a"} Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.145726 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-r75nm" podStartSLOduration=4.8713478630000004 podStartE2EDuration="48.145708373s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.265887422 +0000 UTC m=+864.439244822" lastFinishedPulling="2025-12-03 07:42:24.540247932 +0000 UTC 
m=+907.713605332" observedRunningTime="2025-12-03 07:42:26.142122864 +0000 UTC m=+909.315480264" watchObservedRunningTime="2025-12-03 07:42:26.145708373 +0000 UTC m=+909.319065783"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.156150 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" event={"ID":"58adadbb-3706-4f8c-be33-31836f4860e5","Type":"ContainerStarted","Data":"6242e7f21101cce55a5c9cc33abab8aa4e63087d8a8fee8f2610f989176d5c59"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.169263 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" event={"ID":"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68","Type":"ContainerStarted","Data":"c6fb1404b61d20252a3062037382686b7710807602fa0eae25dbf57063f091d6"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.205056 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" event={"ID":"d75ef15b-d718-436d-b570-21416a0c4021","Type":"ContainerStarted","Data":"b667547e57d10b45ac33de7fe165b1fb50b0384b5a8cb726387cfea602cc2884"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.205891 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.209170 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.221126 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" event={"ID":"c2db30ef-0db3-44d4-b276-3b81195d4962","Type":"ContainerStarted","Data":"d258b6162c6889ef6b4ea95b0c511ef99dd69e319091ac47492d21d2f60c2c31"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.242322 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" event={"ID":"a9a1ffeb-b3b4-4b07-911b-b829962b6827","Type":"ContainerStarted","Data":"aa40f41e50d4980b46ad8f620d26191aa3097314880913609292068b366d4aba"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.250396 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-64f7f94cfd-bdkfw"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.266182 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" event={"ID":"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2","Type":"ContainerStarted","Data":"8c54074d9d032ad34c195cf9a58881f0ed87b3e6c705bf1d2fa81c781f23b8db"}
Dec 03 07:42:26 crc kubenswrapper[4612]: E1203 07:42:26.271160 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podUID="2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.291605 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerStarted","Data":"7f41a05da301918f245ea9750d9551c3a542f81a95f8893d5ac635c79df12871"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.293494 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" event={"ID":"3d3cecf1-2f48-4b22-9350-870d25e786ef","Type":"ContainerStarted","Data":"371678bbb971665cb01a08b706f90f21c3dce4bd313d7488ac5106ac5c20979b"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.307064 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" event={"ID":"99188e23-da4a-4d43-8778-a2a0b9e962dc","Type":"ContainerStarted","Data":"00959db6ee932ad0c51b8afd2f2e210684f6f159820fc0acd3a9b0029c62a3ac"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.307728 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.325260 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.332423 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" event={"ID":"790a3a61-40c9-4360-b7b6-9f08edbec437","Type":"ContainerStarted","Data":"4412cea12009f5d27c8c72b8ca797c213b42b6369ea3823021b446b76cbcce17"}
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.332463 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.722788 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tklpk" podStartSLOduration=4.220571098 podStartE2EDuration="48.722770537s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.125087807 +0000 UTC m=+863.298445207" lastFinishedPulling="2025-12-03 07:42:24.627287256 +0000 UTC m=+907.800644646" observedRunningTime="2025-12-03 07:42:26.710317576 +0000 UTC m=+909.883674976" watchObservedRunningTime="2025-12-03 07:42:26.722770537 +0000 UTC m=+909.896127937"
Dec 03 07:42:26 crc kubenswrapper[4612]: I1203 07:42:26.950238 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jbfkr" podStartSLOduration=4.621122352 podStartE2EDuration="47.950222839s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.263695278 +0000 UTC m=+864.437052678" lastFinishedPulling="2025-12-03 07:42:24.592795765 +0000 UTC m=+907.766153165" observedRunningTime="2025-12-03 07:42:26.949035919 +0000 UTC m=+910.122393319" watchObservedRunningTime="2025-12-03 07:42:26.950222839 +0000 UTC m=+910.123580239"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.076382 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb" podStartSLOduration=5.298518353 podStartE2EDuration="49.076358119s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.84782963 +0000 UTC m=+864.021187030" lastFinishedPulling="2025-12-03 07:42:24.625669396 +0000 UTC m=+907.799026796" observedRunningTime="2025-12-03 07:42:27.076231246 +0000 UTC m=+910.249588646" watchObservedRunningTime="2025-12-03 07:42:27.076358119 +0000 UTC m=+910.249715529"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.340352 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerStarted","Data":"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"}
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.342577 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" event={"ID":"a9a1ffeb-b3b4-4b07-911b-b829962b6827","Type":"ContainerStarted","Data":"faa1e50cb825aa0cdf0886558fff6d6317ac7858d3649f011691a0af119ec0e8"}
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.342705 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.344435 4612 generic.go:334] "Generic (PLEG): container finished" podID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerID="a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4" exitCode=0
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.344470 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerDied","Data":"a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4"}
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.346491 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" event={"ID":"3c3eb81e-314d-486d-afa4-443f33c54510","Type":"ContainerStarted","Data":"e22191788d6fa1424eb91736a96714abf9096849168c78104948c59c207bf389"}
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.347174 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.350079 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" event={"ID":"d70b0d51-8225-4d2b-b128-aeda29446ab9","Type":"ContainerStarted","Data":"ebc262456d97dd4d1071adc5f0c80da33c9243aff311369120b5a53721946da0"}
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.350112 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.521167 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx" podStartSLOduration=4.34194765 podStartE2EDuration="49.521143289s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.506544675 +0000 UTC m=+863.679902075" lastFinishedPulling="2025-12-03 07:42:25.685740324 +0000 UTC m=+908.859097714" observedRunningTime="2025-12-03 07:42:27.515799486 +0000 UTC m=+910.689156896" watchObservedRunningTime="2025-12-03 07:42:27.521143289 +0000 UTC m=+910.694500699"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.599272 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f" podStartSLOduration=41.902767377 podStartE2EDuration="49.599257971s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:42:16.853375118 +0000 UTC m=+900.026732518" lastFinishedPulling="2025-12-03 07:42:24.549865712 +0000 UTC m=+907.723223112" observedRunningTime="2025-12-03 07:42:27.598299827 +0000 UTC m=+910.771657227" watchObservedRunningTime="2025-12-03 07:42:27.599257971 +0000 UTC m=+910.772615371"
Dec 03 07:42:27 crc kubenswrapper[4612]: I1203 07:42:27.805142 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9" podStartSLOduration=41.082699131 podStartE2EDuration="48.805126553s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:42:16.853737937 +0000 UTC m=+900.027095347" lastFinishedPulling="2025-12-03 07:42:24.576165369 +0000 UTC m=+907.749522769" observedRunningTime="2025-12-03 07:42:27.802648491 +0000 UTC m=+910.976005891" watchObservedRunningTime="2025-12-03 07:42:27.805126553 +0000 UTC m=+910.978483953"
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.387748 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerStarted","Data":"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"}
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.400343 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" event={"ID":"5a8351bf-c4cf-40fc-8df9-22b3064770a3","Type":"ContainerStarted","Data":"6ceee35e41f22090692e5ec1a90a678621b928363efa05cce1efef7dcbd69375"}
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.401178 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4"
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.403587 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" event={"ID":"b52c7da9-b392-448b-a04a-1afa333df442","Type":"ContainerStarted","Data":"f3dfb303f6fc68efc24250bbc16b36ad3599f9de29cbeeff5ac0a21ae89baff5"}
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.403801 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr"
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.429658 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" event={"ID":"24a39a3f-a75f-4029-b861-cf683db5aae2","Type":"ContainerStarted","Data":"79fb4ff69ba0e14aa73a5d6d2b7d47ada1b00c33fefa9a1515eb33a80258ae56"}
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.495023 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4" podStartSLOduration=3.972168179 podStartE2EDuration="49.495007801s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.197654108 +0000 UTC m=+864.371011508" lastFinishedPulling="2025-12-03 07:42:26.72049373 +0000 UTC m=+909.893851130" observedRunningTime="2025-12-03 07:42:28.493809932 +0000 UTC m=+911.667167342" watchObservedRunningTime="2025-12-03 07:42:28.495007801 +0000 UTC m=+911.668365201"
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.496638 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jpt4j" podStartSLOduration=22.421323041 podStartE2EDuration="28.496631781s" podCreationTimestamp="2025-12-03 07:42:00 +0000 UTC" firstStartedPulling="2025-12-03 07:42:20.743109036 +0000 UTC m=+903.916466426" lastFinishedPulling="2025-12-03 07:42:26.818417766 +0000 UTC m=+909.991775166" observedRunningTime="2025-12-03 07:42:28.436401299 +0000 UTC m=+911.609758699" watchObservedRunningTime="2025-12-03 07:42:28.496631781 +0000 UTC m=+911.669989171"
Dec 03 07:42:28 crc kubenswrapper[4612]: I1203 07:42:28.567081 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr" podStartSLOduration=3.999350496 podStartE2EDuration="49.567059829s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.260238211 +0000 UTC m=+864.433595611" lastFinishedPulling="2025-12-03 07:42:26.827947544 +0000 UTC m=+910.001304944" observedRunningTime="2025-12-03 07:42:28.539235274 +0000 UTC m=+911.712592674" watchObservedRunningTime="2025-12-03 07:42:28.567059829 +0000 UTC m=+911.740417229"
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.437113 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" event={"ID":"58adadbb-3706-4f8c-be33-31836f4860e5","Type":"ContainerStarted","Data":"76c1d0fb894619321340216f2541ec238532ef6d0d472c14e437faab9bda6dac"}
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.441400 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" event={"ID":"c2db30ef-0db3-44d4-b276-3b81195d4962","Type":"ContainerStarted","Data":"42120dcbf55fe54e7fde10358a077f8faf6cda6b732c6392ddcfa1d9ecd90493"}
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.441595 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s"
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.442985 4612 generic.go:334] "Generic (PLEG): container finished" podID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerID="449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004" exitCode=0
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.443065 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerDied","Data":"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"}
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.443698 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27"
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.470420 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s" podStartSLOduration=5.4223988819999995 podStartE2EDuration="51.470405922s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.854035525 +0000 UTC m=+864.027392925" lastFinishedPulling="2025-12-03 07:42:26.902042565 +0000 UTC m=+910.075399965" observedRunningTime="2025-12-03 07:42:29.467515642 +0000 UTC m=+912.640873052" watchObservedRunningTime="2025-12-03 07:42:29.470405922 +0000 UTC m=+912.643763322"
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.471692 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27" podStartSLOduration=5.874108214 podStartE2EDuration="51.471686943s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.231873833 +0000 UTC m=+864.405231233" lastFinishedPulling="2025-12-03 07:42:26.829452562 +0000 UTC m=+910.002809962" observedRunningTime="2025-12-03 07:42:28.563276907 +0000 UTC m=+911.736634307" watchObservedRunningTime="2025-12-03 07:42:29.471686943 +0000 UTC m=+912.645044343"
Dec 03 07:42:29 crc kubenswrapper[4612]: I1203 07:42:29.524888 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-b2s4p"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.452427 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerStarted","Data":"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.454969 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerStarted","Data":"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.457196 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" event={"ID":"e4cadc4e-ebfd-4886-83a2-1caf4aef2b68","Type":"ContainerStarted","Data":"d1c54ab16a2e9c5e38334902975baded9d8b25a27444f8e3d2eb451749f78868"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.457332 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.459610 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" event={"ID":"3d3cecf1-2f48-4b22-9350-870d25e786ef","Type":"ContainerStarted","Data":"0daa43ffb2157d6cbe476bb0ae5cae495d02d372a00be456248f16f2b687eff6"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.459711 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.461603 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" event={"ID":"39305f1e-8b3f-43aa-97d4-48410cc7fe91","Type":"ContainerStarted","Data":"28a941a14f8d62d863b8fbe79f5d74126f03511d2a6e6294118b3ea6fb5b3087"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.461763 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.463552 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" event={"ID":"b8715491-d469-4ade-8434-765685a955db","Type":"ContainerStarted","Data":"124f366d53e0aff13e6a61200b49768dcd19967ca0de15d9d7c814f5f6b11036"}
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.464344 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.495151 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tm992" podStartSLOduration=8.56268294 podStartE2EDuration="13.49513448s" podCreationTimestamp="2025-12-03 07:42:17 +0000 UTC" firstStartedPulling="2025-12-03 07:42:25.03459591 +0000 UTC m=+908.207953310" lastFinishedPulling="2025-12-03 07:42:29.96704745 +0000 UTC m=+913.140404850" observedRunningTime="2025-12-03 07:42:30.490279702 +0000 UTC m=+913.663637102" watchObservedRunningTime="2025-12-03 07:42:30.49513448 +0000 UTC m=+913.668491880"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.524041 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g" podStartSLOduration=5.814780624 podStartE2EDuration="52.524022221s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.195979156 +0000 UTC m=+864.369336556" lastFinishedPulling="2025-12-03 07:42:27.905220753 +0000 UTC m=+911.078578153" observedRunningTime="2025-12-03 07:42:30.522858283 +0000 UTC m=+913.696215683" watchObservedRunningTime="2025-12-03 07:42:30.524022221 +0000 UTC m=+913.697379641"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.657612 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd" podStartSLOduration=5.264208955 podStartE2EDuration="52.657598181s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.489438838 +0000 UTC m=+863.662796238" lastFinishedPulling="2025-12-03 07:42:27.882828064 +0000 UTC m=+911.056185464" observedRunningTime="2025-12-03 07:42:30.625036211 +0000 UTC m=+913.798393621" watchObservedRunningTime="2025-12-03 07:42:30.657598181 +0000 UTC m=+913.830955581"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.705884 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9" podStartSLOduration=5.980504893 podStartE2EDuration="52.705869662s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.841616885 +0000 UTC m=+864.014974285" lastFinishedPulling="2025-12-03 07:42:27.566981654 +0000 UTC m=+910.740339054" observedRunningTime="2025-12-03 07:42:30.663086664 +0000 UTC m=+913.836444064" watchObservedRunningTime="2025-12-03 07:42:30.705869662 +0000 UTC m=+913.879227052"
Dec 03 07:42:30 crc kubenswrapper[4612]: I1203 07:42:30.706217 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69" podStartSLOduration=6.533929547 podStartE2EDuration="52.706212921s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.729794482 +0000 UTC m=+863.903151882" lastFinishedPulling="2025-12-03 07:42:26.902077856 +0000 UTC m=+910.075435256" observedRunningTime="2025-12-03 07:42:30.703904165 +0000 UTC m=+913.877261565" watchObservedRunningTime="2025-12-03 07:42:30.706212921 +0000 UTC m=+913.879570321"
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.100221 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-rdf2f"
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.102356 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.102467 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.132870 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg" podStartSLOduration=5.493916191 podStartE2EDuration="53.1328536s" podCreationTimestamp="2025-12-03 07:41:38 +0000 UTC" firstStartedPulling="2025-12-03 07:41:40.267592347 +0000 UTC m=+863.440949747" lastFinishedPulling="2025-12-03 07:42:27.906529756 +0000 UTC m=+911.079887156" observedRunningTime="2025-12-03 07:42:30.825616767 +0000 UTC m=+913.998974167" watchObservedRunningTime="2025-12-03 07:42:31.1328536 +0000 UTC m=+914.306211000"
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.471997 4612 generic.go:334] "Generic (PLEG): container finished" podID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerID="e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc" exitCode=0
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.473045 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerDied","Data":"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"}
Dec 03 07:42:31 crc kubenswrapper[4612]: I1203 07:42:31.474026 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"
Dec 03 07:42:32 crc kubenswrapper[4612]: I1203 07:42:32.153630 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-jpt4j" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="registry-server" probeResult="failure" output=<
Dec 03 07:42:32 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s
Dec 03 07:42:32 crc kubenswrapper[4612]: >
Dec 03 07:42:32 crc kubenswrapper[4612]: I1203 07:42:32.480243 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerStarted","Data":"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"}
Dec 03 07:42:32 crc kubenswrapper[4612]: I1203 07:42:32.499168 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6hrsn" podStartSLOduration=4.904599832 podStartE2EDuration="9.499148104s" podCreationTimestamp="2025-12-03 07:42:23 +0000 UTC" firstStartedPulling="2025-12-03 07:42:27.466659209 +0000 UTC m=+910.640016609" lastFinishedPulling="2025-12-03 07:42:32.061207481 +0000 UTC m=+915.234564881" observedRunningTime="2025-12-03 07:42:32.4944206 +0000 UTC m=+915.667778010" watchObservedRunningTime="2025-12-03 07:42:32.499148104 +0000 UTC m=+915.672505514"
Dec 03 07:42:33 crc kubenswrapper[4612]: I1203 07:42:33.677444 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:33 crc kubenswrapper[4612]: I1203 07:42:33.677501 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:34 crc kubenswrapper[4612]: I1203 07:42:34.741566 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-6hrsn" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="registry-server" probeResult="failure" output=<
Dec 03 07:42:34 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s
Dec 03 07:42:34 crc kubenswrapper[4612]: >
Dec 03 07:42:35 crc kubenswrapper[4612]: I1203 07:42:35.444754 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9"
Dec 03 07:42:38 crc kubenswrapper[4612]: I1203 07:42:38.134459 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:38 crc kubenswrapper[4612]: I1203 07:42:38.134502 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:38 crc kubenswrapper[4612]: I1203 07:42:38.889930 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-n2rzg"
Dec 03 07:42:38 crc kubenswrapper[4612]: I1203 07:42:38.958153 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-2pxgx"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.175561 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tm992" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="registry-server" probeResult="failure" output=<
Dec 03 07:42:39 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s
Dec 03 07:42:39 crc kubenswrapper[4612]: >
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.305486 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-5rg69"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.348040 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-6mbrd"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.368293 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-flkb9"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.382858 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bs99s"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.397331 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-d98bb"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.435676 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-m898g"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.581666 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-4vp27"
Dec 03 07:42:39 crc kubenswrapper[4612]: I1203 07:42:39.963240 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-kv6n4"
Dec 03 07:42:40 crc kubenswrapper[4612]: I1203 07:42:40.113990 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-snwlr"
Dec 03 07:42:40 crc kubenswrapper[4612]: I1203 07:42:40.550617 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" event={"ID":"1ac4eade-01c8-4323-8796-6b2d39a7ee36","Type":"ContainerStarted","Data":"603d98d859dffcfb0a455418601a281c226f6ba5aff26932809d59fe28a6ac66"}
Dec 03 07:42:40 crc kubenswrapper[4612]: I1203 07:42:40.551025 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7"
Dec 03 07:42:40 crc kubenswrapper[4612]: I1203 07:42:40.567154 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7" podStartSLOduration=3.518973463 podStartE2EDuration="1m1.567134578s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.437040108 +0000 UTC m=+864.610397508" lastFinishedPulling="2025-12-03 07:42:39.485201223 +0000 UTC m=+922.658558623" observedRunningTime="2025-12-03 07:42:40.566012191 +0000 UTC m=+923.739369611" watchObservedRunningTime="2025-12-03 07:42:40.567134578 +0000 UTC m=+923.740491978"
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.154367 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.210802 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.383217 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"]
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.559105 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" event={"ID":"2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2","Type":"ContainerStarted","Data":"14a3c6509b588ea15e779436c0c793ca4b7f3eef8f86faff4a2feb6bb6c873d3"}
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.560051 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2"
Dec 03 07:42:41 crc kubenswrapper[4612]: I1203 07:42:41.576868 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2" podStartSLOduration=3.373083908 podStartE2EDuration="1m2.576846042s" podCreationTimestamp="2025-12-03 07:41:39 +0000 UTC" firstStartedPulling="2025-12-03 07:41:41.275687337 +0000 UTC m=+864.449044737" lastFinishedPulling="2025-12-03 07:42:40.479449471 +0000 UTC m=+923.652806871" observedRunningTime="2025-12-03 07:42:41.573533561 +0000 UTC m=+924.746890981" watchObservedRunningTime="2025-12-03 07:42:41.576846042 +0000 UTC m=+924.750203462"
Dec 03 07:42:42 crc kubenswrapper[4612]: I1203 07:42:42.564851 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jpt4j" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="registry-server" containerID="cri-o://d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332" gracePeriod=2
Dec 03 07:42:42 crc kubenswrapper[4612]: I1203 07:42:42.968060 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.067863 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp6vs\" (UniqueName: \"kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs\") pod \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") "
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.067967 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content\") pod \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") "
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.068037 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities\") pod \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\" (UID: \"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b\") "
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.069111 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities" (OuterVolumeSpecName: "utilities") pod "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" (UID: "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.081163 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs" (OuterVolumeSpecName: "kube-api-access-rp6vs") pod "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" (UID: "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b"). InnerVolumeSpecName "kube-api-access-rp6vs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.087875 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" (UID: "3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.169609 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp6vs\" (UniqueName: \"kubernetes.io/projected/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-kube-api-access-rp6vs\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.169661 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.169682 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.573495 4612 generic.go:334] "Generic (PLEG): container finished" podID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerID="d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332" exitCode=0
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.573529 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerDied","Data":"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"}
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.573857 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpt4j" event={"ID":"3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b","Type":"ContainerDied","Data":"dc4f9f41ecdf8e952c1054e3b0ab9bacdc7389f00a85668ae2cada329286c900"}
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.573541 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpt4j"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.574038 4612 scope.go:117] "RemoveContainer" containerID="d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.598557 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"]
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.606283 4612 scope.go:117] "RemoveContainer" containerID="5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.606574 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpt4j"]
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.622895 4612 scope.go:117] "RemoveContainer" containerID="4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.654372 4612 scope.go:117] "RemoveContainer" containerID="d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"
Dec 03 07:42:43 crc kubenswrapper[4612]: E1203 07:42:43.654709 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332\": container with ID starting with d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332 not found: ID does not exist" containerID="d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.654740 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332"} err="failed to get container status \"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332\": rpc error: code = NotFound desc = could not find container \"d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332\": container with ID starting with d4ec4d779fd5fe024fa1f6c3fd8fbcdaeacf8ba2bfc5ba8716412faf48e2a332 not found: ID does not exist"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.654759 4612 scope.go:117] "RemoveContainer" containerID="5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e"
Dec 03 07:42:43 crc kubenswrapper[4612]: E1203 07:42:43.655133 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e\": container with ID starting with 5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e not found: ID does not exist" containerID="5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.655192 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e"} err="failed to get container status \"5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e\": rpc error: code = NotFound desc = could not find container \"5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e\": container with ID starting with 5fa0f8ff7c22991546f68222a80c0730595f5c36d62f927cc33e33cb13da5d1e not found: ID does not exist"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.655218 4612 scope.go:117] "RemoveContainer" containerID="4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669"
Dec 03 07:42:43 crc kubenswrapper[4612]: E1203 07:42:43.655569 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669\": container with ID starting with 4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669 not found: ID does not exist" containerID="4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.655613 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669"} err="failed to get container status \"4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669\": rpc error: code = NotFound desc = could not find container \"4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669\": container with ID starting with 4029cc618cea85283b8c14e191b2ac0c0264087a5952d14847fb66489be81669 not found: ID does not exist"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.719201 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:43 crc kubenswrapper[4612]: I1203 07:42:43.762187 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:45 crc kubenswrapper[4612]: I1203 07:42:45.100818 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" path="/var/lib/kubelet/pods/3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b/volumes"
Dec 03 07:42:45 crc kubenswrapper[4612]: I1203 07:42:45.988659 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6hrsn"]
Dec 03 07:42:45 crc kubenswrapper[4612]: I1203 07:42:45.989268 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6hrsn" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="registry-server" containerID="cri-o://bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502" gracePeriod=2
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.418098 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.516641 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6kc5\" (UniqueName: \"kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5\") pod \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") "
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.516776 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content\") pod \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") "
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.516833 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities\") pod \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\" (UID: \"c5fbdf1d-c06a-4145-8582-ff280680b2d5\") "
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.517966 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities" (OuterVolumeSpecName: "utilities") pod "c5fbdf1d-c06a-4145-8582-ff280680b2d5" (UID: "c5fbdf1d-c06a-4145-8582-ff280680b2d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.522878 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5" (OuterVolumeSpecName: "kube-api-access-x6kc5") pod "c5fbdf1d-c06a-4145-8582-ff280680b2d5" (UID: "c5fbdf1d-c06a-4145-8582-ff280680b2d5"). InnerVolumeSpecName "kube-api-access-x6kc5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.562356 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5fbdf1d-c06a-4145-8582-ff280680b2d5" (UID: "c5fbdf1d-c06a-4145-8582-ff280680b2d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.598465 4612 generic.go:334] "Generic (PLEG): container finished" podID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerID="bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502" exitCode=0
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.598513 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerDied","Data":"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"}
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.598546 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hrsn" event={"ID":"c5fbdf1d-c06a-4145-8582-ff280680b2d5","Type":"ContainerDied","Data":"7f41a05da301918f245ea9750d9551c3a542f81a95f8893d5ac635c79df12871"}
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.598568 4612 scope.go:117] "RemoveContainer" containerID="bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.598745 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6hrsn"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.617078 4612 scope.go:117] "RemoveContainer" containerID="e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.617872 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6kc5\" (UniqueName: \"kubernetes.io/projected/c5fbdf1d-c06a-4145-8582-ff280680b2d5-kube-api-access-x6kc5\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.617892 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.617903 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fbdf1d-c06a-4145-8582-ff280680b2d5-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.641888 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6hrsn"]
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.647549 4612 scope.go:117] "RemoveContainer" containerID="a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.648286 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6hrsn"]
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.665168 4612 scope.go:117] "RemoveContainer" containerID="bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"
Dec 03 07:42:46 crc kubenswrapper[4612]: E1203 07:42:46.666311 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502\": container with ID starting with bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502 not found: ID does not exist" containerID="bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.666448 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502"} err="failed to get container status \"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502\": rpc error: code = NotFound desc = could not find container \"bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502\": container with ID starting with bacb3309114b539825d225470d454b1883be120cf1f8d33d4bfd54d576d7e502 not found: ID does not exist"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.666561 4612 scope.go:117] "RemoveContainer" containerID="e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"
Dec 03 07:42:46 crc kubenswrapper[4612]: E1203 07:42:46.666983 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc\": container with ID starting with e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc not found: ID does not exist" containerID="e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.667003 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc"} err="failed to get container status \"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc\": rpc error: code = NotFound desc = could not find container \"e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc\": container with ID starting with e91e4f0df76cb7955101e61c1b717d8d3c90e2989e2480b3f4f6a4e7c844efbc not found: ID does not exist"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.667016 4612 scope.go:117] "RemoveContainer" containerID="a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4"
Dec 03 07:42:46 crc kubenswrapper[4612]: E1203 07:42:46.667365 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4\": container with ID starting with a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4 not found: ID does not exist" containerID="a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4"
Dec 03 07:42:46 crc kubenswrapper[4612]: I1203 07:42:46.667383 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4"} err="failed to get container status \"a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4\": rpc error: code = NotFound desc = could not find container \"a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4\": container with ID starting with a9e2899cf2fdf103021cf971dbc1af904feb109609b80497b0830734c575a0b4 not found: ID does not exist"
Dec 03 07:42:47 crc kubenswrapper[4612]: I1203 07:42:47.098532 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" path="/var/lib/kubelet/pods/c5fbdf1d-c06a-4145-8582-ff280680b2d5/volumes"
Dec 03 07:42:47 crc kubenswrapper[4612]: I1203 07:42:47.136263 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:42:47 crc kubenswrapper[4612]: I1203 07:42:47.136338 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:42:48 crc kubenswrapper[4612]: I1203 07:42:48.200513 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:48 crc kubenswrapper[4612]: I1203 07:42:48.256793 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:49 crc kubenswrapper[4612]: I1203 07:42:49.986388 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tm992"]
Dec 03 07:42:49 crc kubenswrapper[4612]: I1203 07:42:49.986903 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tm992" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="registry-server" containerID="cri-o://da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a" gracePeriod=2
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.008967 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-w62v7"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.087640 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-jbbw2"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.438696 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.498402 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities\") pod \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") "
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.498520 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrkj7\" (UniqueName: \"kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7\") pod \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") "
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.498594 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content\") pod \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\" (UID: \"ff95ca02-ac62-4dfe-9a98-c6888d694e95\") "
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.499209 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities" (OuterVolumeSpecName: "utilities") pod "ff95ca02-ac62-4dfe-9a98-c6888d694e95" (UID: "ff95ca02-ac62-4dfe-9a98-c6888d694e95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.508081 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7" (OuterVolumeSpecName: "kube-api-access-lrkj7") pod "ff95ca02-ac62-4dfe-9a98-c6888d694e95" (UID: "ff95ca02-ac62-4dfe-9a98-c6888d694e95"). InnerVolumeSpecName "kube-api-access-lrkj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.600521 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrkj7\" (UniqueName: \"kubernetes.io/projected/ff95ca02-ac62-4dfe-9a98-c6888d694e95-kube-api-access-lrkj7\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.600550 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.607793 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff95ca02-ac62-4dfe-9a98-c6888d694e95" (UID: "ff95ca02-ac62-4dfe-9a98-c6888d694e95"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.630955 4612 generic.go:334] "Generic (PLEG): container finished" podID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerID="da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a" exitCode=0
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.630998 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerDied","Data":"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"}
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.631022 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tm992" event={"ID":"ff95ca02-ac62-4dfe-9a98-c6888d694e95","Type":"ContainerDied","Data":"5a57bb7707164313393cb7c78a2425e54f03a60a0411a0bbb29b0c120accdd6f"}
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.631022 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tm992"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.631038 4612 scope.go:117] "RemoveContainer" containerID="da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.667343 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tm992"]
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.671761 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tm992"]
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.675222 4612 scope.go:117] "RemoveContainer" containerID="449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.694495 4612 scope.go:117] "RemoveContainer" containerID="d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.701491 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff95ca02-ac62-4dfe-9a98-c6888d694e95-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.713586 4612 scope.go:117] "RemoveContainer" containerID="da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"
Dec 03 07:42:50 crc kubenswrapper[4612]: E1203 07:42:50.714018 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a\": container with ID starting with da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a not found: ID does not exist" containerID="da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.714062 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a"} err="failed to get container status \"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a\": rpc error: code = NotFound desc = could not find container \"da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a\": container with ID starting with da03f13559fdfc72882a4548dba0a731ae92a4ff63f90058fd2cbe0a47032d9a not found: ID does not exist"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.714082 4612 scope.go:117] "RemoveContainer" containerID="449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"
Dec 03 07:42:50 crc kubenswrapper[4612]: E1203 07:42:50.714829 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004\": container with ID starting with 449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004 not found: ID does not exist" containerID="449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.714875 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004"} err="failed to get container status \"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004\": rpc error: code = NotFound desc = could not find container \"449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004\": container with ID starting with 449084e162f16120959c26f39e9d544618d2033de9c0c440e3c08555ecc3f004 not found: ID does not exist"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.714892 4612 scope.go:117] "RemoveContainer" containerID="d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32"
Dec 03 07:42:50 crc kubenswrapper[4612]: E1203 07:42:50.715323 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32\": container with ID starting with d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32 not found: ID does not exist" containerID="d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32"
Dec 03 07:42:50 crc kubenswrapper[4612]: I1203 07:42:50.715368 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32"} err="failed to get container status \"d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32\": rpc error: code = NotFound desc = could not find container \"d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32\": container with ID starting with d35a7bc048f289001c9c3b17966d43bdb7f8e70b5f6169bbd2bf47b24c9b3f32 not found: ID does not exist"
Dec 03 07:42:51 crc kubenswrapper[4612]: I1203 07:42:51.098273 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" path="/var/lib/kubelet/pods/ff95ca02-ac62-4dfe-9a98-c6888d694e95/volumes"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.349728 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"]
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350616 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350632 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350642 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350652 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350670 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350677 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350688 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350696 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350707 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350715 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350737 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350745 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350760 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350767 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350778 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350786 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="extract-utilities"
Dec 03 07:43:05 crc kubenswrapper[4612]: E1203 07:43:05.350801 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.350808 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="extract-content"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.351020 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff95ca02-ac62-4dfe-9a98-c6888d694e95" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.351045 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fcfa4b1-43d4-4c2c-a38d-5516a6383a8b" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.351055 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fbdf1d-c06a-4145-8582-ff280680b2d5" containerName="registry-server"
Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.351873 4612 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.361416 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.361800 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.361966 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-tt8lb" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.362240 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.376399 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"] Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.464008 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.465255 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.468526 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.491895 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.513038 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.513105 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-459zc\" (UniqueName: \"kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.614435 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.614548 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.614588 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-459zc\" (UniqueName: \"kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 
crc kubenswrapper[4612]: I1203 07:43:05.614646 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.614691 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwm9z\" (UniqueName: \"kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.615493 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.653066 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-459zc\" (UniqueName: \"kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc\") pod \"dnsmasq-dns-675f4bcbfc-jtslm\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.677552 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.715996 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.716294 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwm9z\" (UniqueName: \"kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.716355 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.716816 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.716955 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: 
\"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.734907 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwm9z\" (UniqueName: \"kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z\") pod \"dnsmasq-dns-78dd6ddcc-s2c5k\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:05 crc kubenswrapper[4612]: I1203 07:43:05.782686 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:06 crc kubenswrapper[4612]: I1203 07:43:06.185779 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"] Dec 03 07:43:06 crc kubenswrapper[4612]: I1203 07:43:06.195555 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:43:06 crc kubenswrapper[4612]: I1203 07:43:06.363842 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:06 crc kubenswrapper[4612]: W1203 07:43:06.374091 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfaf8346_7797_4ccb_8a34_0f3e2b23af97.slice/crio-77db0f706dac2658eef30dc83a2777b38a020d81cdcc0929d62cb263b6d9e44c WatchSource:0}: Error finding container 77db0f706dac2658eef30dc83a2777b38a020d81cdcc0929d62cb263b6d9e44c: Status 404 returned error can't find the container with id 77db0f706dac2658eef30dc83a2777b38a020d81cdcc0929d62cb263b6d9e44c Dec 03 07:43:06 crc kubenswrapper[4612]: I1203 07:43:06.759544 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" event={"ID":"dfaf8346-7797-4ccb-8a34-0f3e2b23af97","Type":"ContainerStarted","Data":"77db0f706dac2658eef30dc83a2777b38a020d81cdcc0929d62cb263b6d9e44c"} Dec 03 07:43:06 crc kubenswrapper[4612]: I1203 07:43:06.760543 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" event={"ID":"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729","Type":"ContainerStarted","Data":"0084914c7509da160ea20c6d840d590e307082b3e06d454a78deb7b5c6e04c14"} Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.301099 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.330091 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.331179 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.352642 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.469476 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkmpv\" (UniqueName: \"kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.469615 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.469843 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.594790 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkmpv\" (UniqueName: \"kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.594841 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.594886 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.595740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.595737 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.676439 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkmpv\" (UniqueName: 
\"kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv\") pod \"dnsmasq-dns-666b6646f7-qk5t8\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.730252 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.780493 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.781841 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.793239 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.796364 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.796401 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.796455 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdvlq\" (UniqueName: \"kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.897707 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.897753 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.897820 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdvlq\" (UniqueName: \"kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.899044 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: 
\"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.899227 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.927698 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdvlq\" (UniqueName: \"kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq\") pod \"dnsmasq-dns-57d769cc4f-7wkml\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:08 crc kubenswrapper[4612]: I1203 07:43:08.956087 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.100607 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.509197 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.510523 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.517703 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.517881 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.518033 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.518159 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.518296 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.518432 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.518983 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4q5j6" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.556064 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.572468 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621627 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621689 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24t4q\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621715 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621737 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621781 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621800 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621818 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621845 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621874 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621888 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.621909 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.700640 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722753 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722844 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722878 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722900 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722930 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722964 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.722986 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.723019 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.723049 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins\") pod 
\"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.723109 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24t4q\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.723137 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.723277 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.724865 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.726111 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.729894 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.730866 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.732263 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.749853 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.750541 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24t4q\" 
(UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.760316 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.760346 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.771105 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.782663 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.849340 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.851005 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" event={"ID":"b9567553-5429-419e-9c2d-d7af697d7035","Type":"ContainerStarted","Data":"b25851b1fb5b47595c65034c6ae8f71ecf0f035e272dd015a32705642796034c"} Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.855154 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" event={"ID":"47a166f9-373e-4d5e-aaf4-31beb9644df2","Type":"ContainerStarted","Data":"4dc8c2141f68da6cf5252eb41ebbd044439d9cff60f5cfbb7f8d97cdbf4c710c"} Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.921287 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.926102 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.930049 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.969321 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.969554 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.969603 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.969670 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.969817 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.970015 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wglvs" Dec 03 07:43:09 crc kubenswrapper[4612]: I1203 07:43:09.970291 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059427 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059819 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059866 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059893 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059922 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059963 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.059992 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.060025 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.060046 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jjf5\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.060086 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.060120 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161517 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161549 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jjf5\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161579 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161607 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161637 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161677 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161706 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161726 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161747 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161765 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.161780 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.162010 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.163227 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.163464 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.164113 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.164259 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.165728 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.167791 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.180857 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.186584 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.187024 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jjf5\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.188855 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.191954 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.298545 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.636571 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.884251 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:43:10 crc kubenswrapper[4612]: I1203 07:43:10.896415 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerStarted","Data":"071bf769ad8738bd74966b24b554a38ca7620980de1bd428da0783871c26086d"} Dec 03 07:43:10 crc kubenswrapper[4612]: W1203 07:43:10.907960 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52c23f49_b562_4a42_a8bc_b2214d1f8afe.slice/crio-a12c32f60230de1b9bf6c5e49f46702ab08910384e9876c5b1ea24bccac59855 WatchSource:0}: Error finding container a12c32f60230de1b9bf6c5e49f46702ab08910384e9876c5b1ea24bccac59855: Status 404 returned error can't find the container with id a12c32f60230de1b9bf6c5e49f46702ab08910384e9876c5b1ea24bccac59855 Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.374648 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.376087 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.387498 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.392904 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.392995 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.393039 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-bl2rd" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.393498 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.396721 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489268 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489319 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-default\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489347 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489443 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489494 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kolla-config\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489516 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-operator-scripts\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489698 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-pmh6r\" (UniqueName: \"kubernetes.io/projected/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kube-api-access-pmh6r\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.489756 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-generated\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591495 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kolla-config\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591605 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-operator-scripts\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591695 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmh6r\" (UniqueName: \"kubernetes.io/projected/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kube-api-access-pmh6r\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591728 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-generated\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591802 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591832 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-default\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591880 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.591983 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: 
\"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.592441 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kolla-config\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.592565 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.594060 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-generated\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.594137 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-config-data-default\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.594329 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-operator-scripts\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.620398 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmh6r\" (UniqueName: \"kubernetes.io/projected/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-kube-api-access-pmh6r\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.622257 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.628763 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10aae93b-2a6b-4a5a-a27e-9c2714777dfb-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.652634 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"10aae93b-2a6b-4a5a-a27e-9c2714777dfb\") " pod="openstack/openstack-galera-0" Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.738835 4612 util.go:30] "No sandbox for pod can be found. 
Dec 03 07:43:11 crc kubenswrapper[4612]: I1203 07:43:11.918380 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerStarted","Data":"a12c32f60230de1b9bf6c5e49f46702ab08910384e9876c5b1ea24bccac59855"}
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.467424 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.691708 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.703527 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.707042 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.710036 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lczks"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.710398 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.710576 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.710711 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813748 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813799 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813828 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813866 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813892 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813927 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ctbt\" (UniqueName: \"kubernetes.io/projected/af40404d-eb38-4281-ae78-fa546de7d6a2-kube-api-access-5ctbt\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813960 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.813984 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914594 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ctbt\" (UniqueName: \"kubernetes.io/projected/af40404d-eb38-4281-ae78-fa546de7d6a2-kube-api-access-5ctbt\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914640 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914664 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914710 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914730 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914754 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914792 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.914815 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.916544 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.916849 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.917318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.917894 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.918274 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af40404d-eb38-4281-ae78-fa546de7d6a2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.923423 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.948528 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af40404d-eb38-4281-ae78-fa546de7d6a2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0"
\"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0" Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.961137 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ctbt\" (UniqueName: \"kubernetes.io/projected/af40404d-eb38-4281-ae78-fa546de7d6a2-kube-api-access-5ctbt\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0" Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.976536 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"10aae93b-2a6b-4a5a-a27e-9c2714777dfb","Type":"ContainerStarted","Data":"b875751eaa7af501711a6fb1da56d18604a7c38e1beeef733fdbd4172f1d7d55"} Dec 03 07:43:12 crc kubenswrapper[4612]: I1203 07:43:12.977709 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"af40404d-eb38-4281-ae78-fa546de7d6a2\") " pod="openstack/openstack-cell1-galera-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.052723 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.299167 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.300538 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.308102 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.310392 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-qjl9l" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.310571 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.310694 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.334817 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-kolla-config\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.334882 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.334962 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.334991 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-gpplk\" (UniqueName: \"kubernetes.io/projected/b894480f-fa85-4215-8599-23743aa1c262-kube-api-access-gpplk\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.335052 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-config-data\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.437154 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.437797 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpplk\" (UniqueName: \"kubernetes.io/projected/b894480f-fa85-4215-8599-23743aa1c262-kube-api-access-gpplk\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.437869 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-config-data\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.437922 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-kolla-config\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.437969 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.439438 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-kolla-config\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.440596 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b894480f-fa85-4215-8599-23743aa1c262-config-data\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.448598 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.448666 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b894480f-fa85-4215-8599-23743aa1c262-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.479653 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpplk\" (UniqueName: \"kubernetes.io/projected/b894480f-fa85-4215-8599-23743aa1c262-kube-api-access-gpplk\") pod \"memcached-0\" (UID: \"b894480f-fa85-4215-8599-23743aa1c262\") " pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.627951 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 07:43:13 crc kubenswrapper[4612]: I1203 07:43:13.960576 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 07:43:13 crc kubenswrapper[4612]: W1203 07:43:13.990386 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf40404d_eb38_4281_ae78_fa546de7d6a2.slice/crio-d3410ad55e99813a9110ca6cd7a0eed7a38ba3f7a56b3409de47616519f35d2a WatchSource:0}: Error finding container d3410ad55e99813a9110ca6cd7a0eed7a38ba3f7a56b3409de47616519f35d2a: Status 404 returned error can't find the container with id d3410ad55e99813a9110ca6cd7a0eed7a38ba3f7a56b3409de47616519f35d2a Dec 03 07:43:14 crc kubenswrapper[4612]: I1203 07:43:14.265064 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.049316 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.050983 4612 util.go:30] "No sandbox for pod can be found. 
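Before any of the volumes above can be mounted, the kubelet starts a reflector for each ConfigMap and Secret a pod references, hence the "Caches populated for *v1.ConfigMap/*v1.Secret" entries. Outside the kubelet the same objects can be fetched directly; a sketch, with the memcached object names taken from the log and everything else (kubeconfig location, error handling) assumed:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: a local kubeconfig; inside a pod use rest.InClusterConfig instead.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	ctx := context.Background()

	// "memcached-config-data" backs the config-data configmap volume above.
	cm, err := cs.CoreV1().ConfigMaps("openstack").Get(ctx, "memcached-config-data", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for k := range cm.Data {
		fmt.Println("configmap key:", k)
	}

	// "cert-memcached-svc" backs the memcached-tls-certs secret volume above.
	sec, err := cs.CoreV1().Secrets("openstack").Get(ctx, "cert-memcached-svc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for k := range sec.Data {
		fmt.Println("secret key:", k) // values deliberately not printed
	}
}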
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.052591 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b894480f-fa85-4215-8599-23743aa1c262","Type":"ContainerStarted","Data":"7b1eb1774b9f974585528119697252c6cdc7275d59ef447454298c3546798ac9"}
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.053541 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.055873 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-d4gw4"
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.074215 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af40404d-eb38-4281-ae78-fa546de7d6a2","Type":"ContainerStarted","Data":"d3410ad55e99813a9110ca6cd7a0eed7a38ba3f7a56b3409de47616519f35d2a"}
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.092169 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6qb4\" (UniqueName: \"kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4\") pod \"kube-state-metrics-0\" (UID: \"f305106f-eafb-456f-b958-3895a14ec520\") " pod="openstack/kube-state-metrics-0"
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.198452 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6qb4\" (UniqueName: \"kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4\") pod \"kube-state-metrics-0\" (UID: \"f305106f-eafb-456f-b958-3895a14ec520\") " pod="openstack/kube-state-metrics-0"
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.253267 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6qb4\" (UniqueName: \"kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4\") pod \"kube-state-metrics-0\" (UID: \"f305106f-eafb-456f-b958-3895a14ec520\") " pod="openstack/kube-state-metrics-0"
Dec 03 07:43:15 crc kubenswrapper[4612]: I1203 07:43:15.387416 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 03 07:43:17 crc kubenswrapper[4612]: I1203 07:43:17.136421 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:43:17 crc kubenswrapper[4612]: I1203 07:43:17.136722 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:43:17 crc kubenswrapper[4612]: I1203 07:43:17.136762 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2"
Dec 03 07:43:17 crc kubenswrapper[4612]: I1203 07:43:17.137246 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 07:43:17 crc kubenswrapper[4612]: I1203 07:43:17.137288 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e" gracePeriod=600
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.156438 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e" exitCode=0
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.156516 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e"}
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.156741 4612 scope.go:117] "RemoveContainer" containerID="6eb537965b8dc9beeeb70ed8225f9d3e2c9c9ba317f26825107f0eb87a41a235"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.754161 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j7748"]
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.755385 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748"
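The machine-config-daemon sequence above is a complete liveness-restart cycle: the HTTP probe of http://127.0.0.1:8798/health fails with connection refused, the kubelet marks the container unhealthy, and the container is killed with gracePeriod=600 before being restarted. A stdlib-only Go sketch of those two steps follows; probe mirrors the kubelet's HTTP check against the endpoint from the log, while killWithGrace is a hypothetical stand-in for the runtime's SIGTERM-then-SIGKILL stop path, not CRI-O's actual implementation:

package main

import (
	"fmt"
	"net/http"
	"os"
	"syscall"
	"time"
)

// probe performs an HTTP liveness check; a refused connection surfaces as an
// error, i.e. a failed probe, exactly as in the prober.go entries above.
func probe(url string, timeout time.Duration) error {
	c := http.Client{Timeout: timeout}
	resp, err := c.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

// killWithGrace mirrors the shape of "Killing container with a grace period":
// SIGTERM first, SIGKILL only if the process is still alive when it expires.
func killWithGrace(pid int, grace time.Duration) {
	p, err := os.FindProcess(pid)
	if err != nil {
		return
	}
	_ = p.Signal(syscall.SIGTERM)
	done := make(chan struct{})
	// Note: Wait only works for child processes; a real runtime polls instead.
	go func() { p.Wait(); close(done) }()
	select {
	case <-done: // exited cleanly within the grace period (exitCode=0 above)
	case <-time.After(grace):
		_ = p.Signal(syscall.SIGKILL)
	}
}

func main() {
	if err := probe("http://127.0.0.1:8798/health", 1*time.Second); err != nil {
		fmt.Println("Probe failed:", err)
		// killWithGrace(somePID, 600*time.Second) // gracePeriod=600 in the log
	}
}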
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.765376 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-8vgnd"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.767275 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.767703 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793593 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/385edacb-e835-42f4-a521-7c321043b989-scripts\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793653 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-combined-ca-bundle\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793681 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-log-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793717 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793793 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg94j\" (UniqueName: \"kubernetes.io/projected/385edacb-e835-42f4-a521-7c321043b989-kube-api-access-hg94j\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793830 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.793855 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-ovn-controller-tls-certs\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.818998 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ng85x"]
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.820553 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.832191 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ng85x"]
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.851326 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j7748"]
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897317 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-log\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897358 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-etc-ovs\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897419 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg94j\" (UniqueName: \"kubernetes.io/projected/385edacb-e835-42f4-a521-7c321043b989-kube-api-access-hg94j\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897451 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897470 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-ovn-controller-tls-certs\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897527 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-lib\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897547 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/385edacb-e835-42f4-a521-7c321043b989-scripts\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897597 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-combined-ca-bundle\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897618 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-run\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897657 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2qwn\" (UniqueName: \"kubernetes.io/projected/d487d317-a480-4234-9db2-b9018c5c5e38-kube-api-access-r2qwn\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897689 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-log-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897716 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.897733 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d487d317-a480-4234-9db2-b9018c5c5e38-scripts\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.898697 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.898788 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-log-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.898883 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/385edacb-e835-42f4-a521-7c321043b989-var-run-ovn\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.903642 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/385edacb-e835-42f4-a521-7c321043b989-scripts\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.918916 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-ovn-controller-tls-certs\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.920901 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg94j\" (UniqueName: \"kubernetes.io/projected/385edacb-e835-42f4-a521-7c321043b989-kube-api-access-hg94j\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.924817 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385edacb-e835-42f4-a521-7c321043b989-combined-ca-bundle\") pod \"ovn-controller-j7748\" (UID: \"385edacb-e835-42f4-a521-7c321043b989\") " pod="openstack/ovn-controller-j7748"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998609 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d487d317-a480-4234-9db2-b9018c5c5e38-scripts\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998690 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-log\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998717 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-etc-ovs\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998799 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-lib\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998836 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-run\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:18 crc kubenswrapper[4612]: I1203 07:43:18.998861 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2qwn\" (UniqueName: \"kubernetes.io/projected/d487d317-a480-4234-9db2-b9018c5c5e38-kube-api-access-r2qwn\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.000755 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-lib\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.000853 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-log\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.000894 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-var-run\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.000936 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d487d317-a480-4234-9db2-b9018c5c5e38-etc-ovs\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.001672 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d487d317-a480-4234-9db2-b9018c5c5e38-scripts\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.045631 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2qwn\" (UniqueName: \"kubernetes.io/projected/d487d317-a480-4234-9db2-b9018c5c5e38-kube-api-access-r2qwn\") pod \"ovn-controller-ovs-ng85x\" (UID: \"d487d317-a480-4234-9db2-b9018c5c5e38\") " pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.090488 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748"
Dec 03 07:43:19 crc kubenswrapper[4612]: I1203 07:43:19.137510 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ng85x"
Dec 03 07:43:19 crc kubenswrapper[4612]: E1203 07:43:19.702303 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.005040 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.006383 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.017593 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.018134 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.018350 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.018590 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.018646 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8crnb"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.035973 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.063394 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.063623 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj6vn\" (UniqueName: \"kubernetes.io/projected/9463ced8-f487-4a16-9af3-07b736ca556c-kube-api-access-rj6vn\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.063731 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.063768 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.063787 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-config\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.064851 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.064878 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.064920 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168492 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168543 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj6vn\" (UniqueName: \"kubernetes.io/projected/9463ced8-f487-4a16-9af3-07b736ca556c-kube-api-access-rj6vn\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168578 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168621 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168643 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-config\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168705 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168726 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.168750 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.169320 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.170924 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-config\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.170972 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.175808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.177429 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.189299 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9463ced8-f487-4a16-9af3-07b736ca556c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.194437 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9463ced8-f487-4a16-9af3-07b736ca556c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.213203 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj6vn\" (UniqueName: \"kubernetes.io/projected/9463ced8-f487-4a16-9af3-07b736ca556c-kube-api-access-rj6vn\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.230612 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.242257 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.242595 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
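"MountVolume.MountDevice succeeded ... device mount path \"/mnt/openstack/pv02\"" is the device-staging half of the two-phase mount for a local volume, after which SetUp makes the path available to the pod. One common way to verify such a staging path is actually mounted is the classic device-ID comparison against the parent directory; a Linux-only sketch (it misses bind mounts that stay on the same filesystem), with the path taken from the entry above:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"syscall"
)

// isMountPoint reports whether path sits on a different device than its
// parent directory, the traditional Linux mountpoint heuristic.
func isMountPoint(path string) (bool, error) {
	var st, parent syscall.Stat_t
	if err := syscall.Stat(path, &st); err != nil {
		return false, err
	}
	if err := syscall.Stat(filepath.Dir(path), &parent); err != nil {
		return false, err
	}
	return st.Dev != parent.Dev, nil
}

func main() {
	mp, err := isMountPoint("/mnt/openstack/pv02")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println("/mnt/openstack/pv02 is a mountpoint:", mp)
}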
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.249035 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9463ced8-f487-4a16-9af3-07b736ca556c\") " pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.249631 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.250162 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.250615 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-v4xcm" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.251129 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.269785 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76717115-6292-47aa-bc1a-90c5e618967b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.269850 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-config\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.269880 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.269900 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.270044 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.270093 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.270138 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-gdtzx\" (UniqueName: \"kubernetes.io/projected/76717115-6292-47aa-bc1a-90c5e618967b-kube-api-access-gdtzx\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.270196 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.328113 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.371809 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-config\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.371874 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.371904 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.371965 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.371997 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.372034 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtzx\" (UniqueName: \"kubernetes.io/projected/76717115-6292-47aa-bc1a-90c5e618967b-kube-api-access-gdtzx\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.372076 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.372124 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/76717115-6292-47aa-bc1a-90c5e618967b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.372496 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.372690 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76717115-6292-47aa-bc1a-90c5e618967b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.373825 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.373835 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76717115-6292-47aa-bc1a-90c5e618967b-config\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.376696 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.386856 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.390716 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdtzx\" (UniqueName: \"kubernetes.io/projected/76717115-6292-47aa-bc1a-90c5e618967b-kube-api-access-gdtzx\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.392601 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76717115-6292-47aa-bc1a-90c5e618967b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.399672 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"76717115-6292-47aa-bc1a-90c5e618967b\") " pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:22 crc kubenswrapper[4612]: I1203 07:43:22.622513 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:29 crc kubenswrapper[4612]: E1203 07:43:29.885730 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.423917 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.424696 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pmh6r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(10aae93b-2a6b-4a5a-a27e-9c2714777dfb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.426328 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="10aae93b-2a6b-4a5a-a27e-9c2714777dfb" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.442788 4612 log.go:32] "PullImage from image 
service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.442983 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5ctbt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(af40404d-eb38-4281-ae78-fa546de7d6a2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:32 crc kubenswrapper[4612]: E1203 07:43:32.444208 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="af40404d-eb38-4281-ae78-fa546de7d6a2" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.049373 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.049560 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:ndbh55ch6h5c6hf6h95hcdhd6h574h54bh65bh5f5hc6h66dhb5h684h9h89h88h98hcfhf4h66ch596hcfh574h64fh64bhddh96h59bh5b9q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpplk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(b894480f-fa85-4215-8599-23743aa1c262): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.050734 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="b894480f-fa85-4215-8599-23743aa1c262" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.317666 4612 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="af40404d-eb38-4281-ae78-fa546de7d6a2" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.317714 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="b894480f-fa85-4215-8599-23743aa1c262" Dec 03 07:43:33 crc kubenswrapper[4612]: E1203 07:43:33.318181 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="10aae93b-2a6b-4a5a-a27e-9c2714777dfb" Dec 03 07:43:37 crc kubenswrapper[4612]: I1203 07:43:37.285423 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j7748"] Dec 03 07:43:37 crc kubenswrapper[4612]: I1203 07:43:37.441697 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:43:37 crc kubenswrapper[4612]: I1203 07:43:37.541040 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.889205 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.889624 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-459zc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-jtslm_openstack(0a6067bd-32ae-4f2e-89e3-6f33bd7f9729): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.891323 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" podUID="0a6067bd-32ae-4f2e-89e3-6f33bd7f9729" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.906616 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.906738 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cdvlq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-7wkml_openstack(47a166f9-373e-4d5e-aaf4-31beb9644df2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.907932 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.912174 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.912420 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bkmpv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-qk5t8_openstack(b9567553-5429-419e-9c2d-d7af697d7035): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.914142 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" podUID="b9567553-5429-419e-9c2d-d7af697d7035" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.977097 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.977373 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rwm9z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-s2c5k_openstack(dfaf8346-7797-4ccb-8a34-0f3e2b23af97): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:43:37 crc kubenswrapper[4612]: E1203 07:43:37.978896 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" podUID="dfaf8346-7797-4ccb-8a34-0f3e2b23af97" Dec 03 07:43:38 crc kubenswrapper[4612]: W1203 07:43:38.256578 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9463ced8_f487_4a16_9af3_07b736ca556c.slice/crio-71d81aedcc1f6455f0fd14ed1bf3ea0fe7a7dfe0137eb09f9d1456b860050c47 WatchSource:0}: Error finding container 71d81aedcc1f6455f0fd14ed1bf3ea0fe7a7dfe0137eb09f9d1456b860050c47: Status 404 returned error can't find the container with id 71d81aedcc1f6455f0fd14ed1bf3ea0fe7a7dfe0137eb09f9d1456b860050c47 Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.257058 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.363517 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9463ced8-f487-4a16-9af3-07b736ca556c","Type":"ContainerStarted","Data":"71d81aedcc1f6455f0fd14ed1bf3ea0fe7a7dfe0137eb09f9d1456b860050c47"} Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.364600 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f305106f-eafb-456f-b958-3895a14ec520","Type":"ContainerStarted","Data":"46b23469281868f2f20254be28918002e53739bca381ffee50452e2446251dc1"} Dec 03 
07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.367772 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"76717115-6292-47aa-bc1a-90c5e618967b","Type":"ContainerStarted","Data":"0eb21fec97a17f2772e1097541b3b5f8a2512effccb49b5613a2c11c28829c27"} Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.379385 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf"} Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.380874 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748" event={"ID":"385edacb-e835-42f4-a521-7c321043b989","Type":"ContainerStarted","Data":"316dadbda4b4a974ea095e45f9aaa31a14cfe071e575ab7bca01051846ea3ce7"} Dec 03 07:43:38 crc kubenswrapper[4612]: E1203 07:43:38.387219 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" Dec 03 07:43:38 crc kubenswrapper[4612]: E1203 07:43:38.387287 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" podUID="b9567553-5429-419e-9c2d-d7af697d7035" Dec 03 07:43:38 crc kubenswrapper[4612]: I1203 07:43:38.522762 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ng85x"] Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.392022 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerStarted","Data":"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74"} Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.399740 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerStarted","Data":"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f"} Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.401560 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ng85x" event={"ID":"d487d317-a480-4234-9db2-b9018c5c5e38","Type":"ContainerStarted","Data":"7b946b5068ed8714ceb8a61d5259a1c87e48a1dea905cdf73e9a7a3da51b9e80"} Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.546624 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.548575 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.677785 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config\") pod \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.677864 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc\") pod \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678002 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config\") pod \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678047 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwm9z\" (UniqueName: \"kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z\") pod \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\" (UID: \"dfaf8346-7797-4ccb-8a34-0f3e2b23af97\") " Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678217 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-459zc\" (UniqueName: \"kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc\") pod \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\" (UID: \"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729\") " Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678299 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config" (OuterVolumeSpecName: "config") pod "dfaf8346-7797-4ccb-8a34-0f3e2b23af97" (UID: "dfaf8346-7797-4ccb-8a34-0f3e2b23af97"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678370 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config" (OuterVolumeSpecName: "config") pod "0a6067bd-32ae-4f2e-89e3-6f33bd7f9729" (UID: "0a6067bd-32ae-4f2e-89e3-6f33bd7f9729"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678691 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dfaf8346-7797-4ccb-8a34-0f3e2b23af97" (UID: "dfaf8346-7797-4ccb-8a34-0f3e2b23af97"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678839 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678853 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.678861 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.683932 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z" (OuterVolumeSpecName: "kube-api-access-rwm9z") pod "dfaf8346-7797-4ccb-8a34-0f3e2b23af97" (UID: "dfaf8346-7797-4ccb-8a34-0f3e2b23af97"). InnerVolumeSpecName "kube-api-access-rwm9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.700875 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc" (OuterVolumeSpecName: "kube-api-access-459zc") pod "0a6067bd-32ae-4f2e-89e3-6f33bd7f9729" (UID: "0a6067bd-32ae-4f2e-89e3-6f33bd7f9729"). InnerVolumeSpecName "kube-api-access-459zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.780424 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwm9z\" (UniqueName: \"kubernetes.io/projected/dfaf8346-7797-4ccb-8a34-0f3e2b23af97-kube-api-access-rwm9z\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:39 crc kubenswrapper[4612]: I1203 07:43:39.780457 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-459zc\" (UniqueName: \"kubernetes.io/projected/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729-kube-api-access-459zc\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:40 crc kubenswrapper[4612]: E1203 07:43:40.078925 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.409786 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" event={"ID":"0a6067bd-32ae-4f2e-89e3-6f33bd7f9729","Type":"ContainerDied","Data":"0084914c7509da160ea20c6d840d590e307082b3e06d454a78deb7b5c6e04c14"} Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.409816 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtslm" Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.411345 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.411424 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-s2c5k" event={"ID":"dfaf8346-7797-4ccb-8a34-0f3e2b23af97","Type":"ContainerDied","Data":"77db0f706dac2658eef30dc83a2777b38a020d81cdcc0929d62cb263b6d9e44c"} Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.479282 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"] Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.488969 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtslm"] Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.520775 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:40 crc kubenswrapper[4612]: I1203 07:43:40.536546 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-s2c5k"] Dec 03 07:43:41 crc kubenswrapper[4612]: I1203 07:43:41.099161 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a6067bd-32ae-4f2e-89e3-6f33bd7f9729" path="/var/lib/kubelet/pods/0a6067bd-32ae-4f2e-89e3-6f33bd7f9729/volumes" Dec 03 07:43:41 crc kubenswrapper[4612]: I1203 07:43:41.099521 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfaf8346-7797-4ccb-8a34-0f3e2b23af97" path="/var/lib/kubelet/pods/dfaf8346-7797-4ccb-8a34-0f3e2b23af97/volumes" Dec 03 07:43:43 crc kubenswrapper[4612]: I1203 07:43:43.450711 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9463ced8-f487-4a16-9af3-07b736ca556c","Type":"ContainerStarted","Data":"5f46e2006359ae50d4fd3bfe61724e509e2ecc39071fc73aa93381b65f86154e"} Dec 03 07:43:43 crc kubenswrapper[4612]: I1203 07:43:43.456453 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"76717115-6292-47aa-bc1a-90c5e618967b","Type":"ContainerStarted","Data":"fff513b9e059ec74f8b11b5c98f839c19a28f047db21936c0246046b79ab3b03"} Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.473134 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748" event={"ID":"385edacb-e835-42f4-a521-7c321043b989","Type":"ContainerStarted","Data":"3b0d26fe54495a73cb8dc928e3747cce3bcac611c622d2093179691044bda5b5"} Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.473449 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-j7748" Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.477045 4612 generic.go:334] "Generic (PLEG): container finished" podID="d487d317-a480-4234-9db2-b9018c5c5e38" containerID="bb0472719b9479488a4f601c3ddd8115d7c139dd655016a37b0d80810e780d2f" exitCode=0 Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.477118 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ng85x" event={"ID":"d487d317-a480-4234-9db2-b9018c5c5e38","Type":"ContainerDied","Data":"bb0472719b9479488a4f601c3ddd8115d7c139dd655016a37b0d80810e780d2f"} Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.479822 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f305106f-eafb-456f-b958-3895a14ec520","Type":"ContainerStarted","Data":"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5"} Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.479881 4612 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.497768 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-j7748" podStartSLOduration=21.268019192 podStartE2EDuration="26.497750255s" podCreationTimestamp="2025-12-03 07:43:18 +0000 UTC" firstStartedPulling="2025-12-03 07:43:37.871149687 +0000 UTC m=+981.044507097" lastFinishedPulling="2025-12-03 07:43:43.10088076 +0000 UTC m=+986.274238160" observedRunningTime="2025-12-03 07:43:44.49464502 +0000 UTC m=+987.668002440" watchObservedRunningTime="2025-12-03 07:43:44.497750255 +0000 UTC m=+987.671107665" Dec 03 07:43:44 crc kubenswrapper[4612]: I1203 07:43:44.513012 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=24.236183519 podStartE2EDuration="29.512998155s" podCreationTimestamp="2025-12-03 07:43:15 +0000 UTC" firstStartedPulling="2025-12-03 07:43:37.921445707 +0000 UTC m=+981.094803097" lastFinishedPulling="2025-12-03 07:43:43.198260333 +0000 UTC m=+986.371617733" observedRunningTime="2025-12-03 07:43:44.509781557 +0000 UTC m=+987.683138957" watchObservedRunningTime="2025-12-03 07:43:44.512998155 +0000 UTC m=+987.686355555" Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.490765 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ng85x" event={"ID":"d487d317-a480-4234-9db2-b9018c5c5e38","Type":"ContainerStarted","Data":"f9251360caad6796d209708beff0868cb13df66afdd12fd7a5f2caa2735c5bab"} Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.491069 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ng85x" Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.491085 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ng85x" event={"ID":"d487d317-a480-4234-9db2-b9018c5c5e38","Type":"ContainerStarted","Data":"c1be82f67305567f3a7cd898cf1951beb2636a38da508db0f0639c45c7f8ae38"} Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.502271 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b894480f-fa85-4215-8599-23743aa1c262","Type":"ContainerStarted","Data":"83b0f10d7ea0f6ef4aca98fabcaa037ceae34c999305fcb0118388e348d6f2aa"} Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.512777 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ng85x" podStartSLOduration=23.016717183 podStartE2EDuration="27.512758748s" podCreationTimestamp="2025-12-03 07:43:18 +0000 UTC" firstStartedPulling="2025-12-03 07:43:38.604845745 +0000 UTC m=+981.778203145" lastFinishedPulling="2025-12-03 07:43:43.10088731 +0000 UTC m=+986.274244710" observedRunningTime="2025-12-03 07:43:45.506582518 +0000 UTC m=+988.679939918" watchObservedRunningTime="2025-12-03 07:43:45.512758748 +0000 UTC m=+988.686116148" Dec 03 07:43:45 crc kubenswrapper[4612]: I1203 07:43:45.527171 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.223710463 podStartE2EDuration="32.527144077s" podCreationTimestamp="2025-12-03 07:43:13 +0000 UTC" firstStartedPulling="2025-12-03 07:43:14.282283705 +0000 UTC m=+957.455641105" lastFinishedPulling="2025-12-03 07:43:44.585717319 +0000 UTC m=+987.759074719" observedRunningTime="2025-12-03 07:43:45.522273999 +0000 UTC 
m=+988.695631389" watchObservedRunningTime="2025-12-03 07:43:45.527144077 +0000 UTC m=+988.700501477" Dec 03 07:43:46 crc kubenswrapper[4612]: I1203 07:43:46.514392 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ng85x" Dec 03 07:43:48 crc kubenswrapper[4612]: I1203 07:43:48.629274 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 07:43:50 crc kubenswrapper[4612]: E1203 07:43:50.287329 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.571714 4612 generic.go:334] "Generic (PLEG): container finished" podID="b9567553-5429-419e-9c2d-d7af697d7035" containerID="3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977" exitCode=0 Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.571807 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" event={"ID":"b9567553-5429-419e-9c2d-d7af697d7035","Type":"ContainerDied","Data":"3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.575288 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"10aae93b-2a6b-4a5a-a27e-9c2714777dfb","Type":"ContainerStarted","Data":"ffbeb7e10a7e212d2fe1036e34e1b9c846043c3000c04e39cfc97373c12e6f99"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.580368 4612 generic.go:334] "Generic (PLEG): container finished" podID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerID="7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5" exitCode=0 Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.580434 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" event={"ID":"47a166f9-373e-4d5e-aaf4-31beb9644df2","Type":"ContainerDied","Data":"7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.582753 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af40404d-eb38-4281-ae78-fa546de7d6a2","Type":"ContainerStarted","Data":"cdf57c7576cd5ae043948c208e38f9aef9e28f0f60a2b22a2d0265cba185ef8a"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.585966 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9463ced8-f487-4a16-9af3-07b736ca556c","Type":"ContainerStarted","Data":"3853b6a67f1386bd3368aff5a36da5b6e28df6bfead42bfcc91d0d27f1ca6fc9"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.598635 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"76717115-6292-47aa-bc1a-90c5e618967b","Type":"ContainerStarted","Data":"acfa1edb2e1749f0b5855dfd3a4b22c66accb7f6651804ebd500ae5b59772402"} Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.624581 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.624738 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 
07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.654893 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=20.026947431 podStartE2EDuration="32.654860271s" podCreationTimestamp="2025-12-03 07:43:20 +0000 UTC" firstStartedPulling="2025-12-03 07:43:38.259572279 +0000 UTC m=+981.432929679" lastFinishedPulling="2025-12-03 07:43:50.887485119 +0000 UTC m=+994.060842519" observedRunningTime="2025-12-03 07:43:52.654722048 +0000 UTC m=+995.828079448" watchObservedRunningTime="2025-12-03 07:43:52.654860271 +0000 UTC m=+995.828217681" Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.695813 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:52 crc kubenswrapper[4612]: I1203 07:43:52.717971 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=18.743385512 podStartE2EDuration="31.717936161s" podCreationTimestamp="2025-12-03 07:43:21 +0000 UTC" firstStartedPulling="2025-12-03 07:43:37.873655957 +0000 UTC m=+981.047013367" lastFinishedPulling="2025-12-03 07:43:50.848206606 +0000 UTC m=+994.021564016" observedRunningTime="2025-12-03 07:43:52.717765597 +0000 UTC m=+995.891123017" watchObservedRunningTime="2025-12-03 07:43:52.717936161 +0000 UTC m=+995.891293561" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.608974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" event={"ID":"b9567553-5429-419e-9c2d-d7af697d7035","Type":"ContainerStarted","Data":"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f"} Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.609483 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.611428 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" event={"ID":"47a166f9-373e-4d5e-aaf4-31beb9644df2","Type":"ContainerStarted","Data":"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a"} Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.628624 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" podStartSLOduration=3.814569954 podStartE2EDuration="45.628606783s" podCreationTimestamp="2025-12-03 07:43:08 +0000 UTC" firstStartedPulling="2025-12-03 07:43:09.634647622 +0000 UTC m=+952.808005022" lastFinishedPulling="2025-12-03 07:43:51.448684411 +0000 UTC m=+994.622041851" observedRunningTime="2025-12-03 07:43:53.627991278 +0000 UTC m=+996.801348688" watchObservedRunningTime="2025-12-03 07:43:53.628606783 +0000 UTC m=+996.801964183" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.630205 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.656313 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.659565 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" podStartSLOduration=3.9384186679999997 podStartE2EDuration="45.659548903s" podCreationTimestamp="2025-12-03 07:43:08 +0000 UTC" firstStartedPulling="2025-12-03 07:43:09.728169661 +0000 UTC m=+952.901527061" 
lastFinishedPulling="2025-12-03 07:43:51.449299886 +0000 UTC m=+994.622657296" observedRunningTime="2025-12-03 07:43:53.656663123 +0000 UTC m=+996.830020543" watchObservedRunningTime="2025-12-03 07:43:53.659548903 +0000 UTC m=+996.832906313" Dec 03 07:43:53 crc kubenswrapper[4612]: I1203 07:43:53.974341 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.022623 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.023970 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.026592 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.037426 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.081786 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-zc5qp"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.082920 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.085885 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.102114 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.108449 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zc5qp"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206684 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206723 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07101baf-17d1-4e64-8c8a-4ee57ab33873-config\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206745 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovn-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206786 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8qzx\" (UniqueName: \"kubernetes.io/projected/07101baf-17d1-4e64-8c8a-4ee57ab33873-kube-api-access-t8qzx\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 
07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206804 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c25cb\" (UniqueName: \"kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206832 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206849 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206866 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-combined-ca-bundle\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206915 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovs-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.206968 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308084 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8qzx\" (UniqueName: \"kubernetes.io/projected/07101baf-17d1-4e64-8c8a-4ee57ab33873-kube-api-access-t8qzx\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308122 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c25cb\" (UniqueName: \"kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308148 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: 
\"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308169 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308186 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-combined-ca-bundle\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308223 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovs-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308911 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.308518 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovs-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.309045 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.309075 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07101baf-17d1-4e64-8c8a-4ee57ab33873-config\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.309098 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovn-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.309257 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/07101baf-17d1-4e64-8c8a-4ee57ab33873-ovn-rundir\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc 
kubenswrapper[4612]: I1203 07:43:54.309972 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07101baf-17d1-4e64-8c8a-4ee57ab33873-config\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.310744 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.310841 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.310899 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.313264 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-combined-ca-bundle\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.322452 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/07101baf-17d1-4e64-8c8a-4ee57ab33873-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.327701 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c25cb\" (UniqueName: \"kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb\") pod \"dnsmasq-dns-6bc7876d45-xv5zr\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.333325 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8qzx\" (UniqueName: \"kubernetes.io/projected/07101baf-17d1-4e64-8c8a-4ee57ab33873-kube-api-access-t8qzx\") pod \"ovn-controller-metrics-zc5qp\" (UID: \"07101baf-17d1-4e64-8c8a-4ee57ab33873\") " pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.346297 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.404803 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-zc5qp" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.447173 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.489915 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.502813 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.505975 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.516250 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.616231 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.616574 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.616604 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.616649 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.616738 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rlpp\" (UniqueName: \"kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.620176 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="dnsmasq-dns" containerID="cri-o://9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a" gracePeriod=10 Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.719823 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb\") pod 
\"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.719887 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.719920 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.720040 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.720208 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rlpp\" (UniqueName: \"kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.720660 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.720783 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.721716 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.722358 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.741118 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rlpp\" (UniqueName: \"kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp\") pod \"dnsmasq-dns-8554648995-sgg7f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " 
pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.872434 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.918784 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:54 crc kubenswrapper[4612]: I1203 07:43:54.970969 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:55 crc kubenswrapper[4612]: W1203 07:43:55.031372 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07101baf_17d1_4e64_8c8a_4ee57ab33873.slice/crio-7aea97365d2a8840ec45c281a7803519f3690f7c42742a945c16aec509cef919 WatchSource:0}: Error finding container 7aea97365d2a8840ec45c281a7803519f3690f7c42742a945c16aec509cef919: Status 404 returned error can't find the container with id 7aea97365d2a8840ec45c281a7803519f3690f7c42742a945c16aec509cef919 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.035720 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zc5qp"] Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.131731 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config\") pod \"47a166f9-373e-4d5e-aaf4-31beb9644df2\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.132133 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdvlq\" (UniqueName: \"kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq\") pod \"47a166f9-373e-4d5e-aaf4-31beb9644df2\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.132209 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc\") pod \"47a166f9-373e-4d5e-aaf4-31beb9644df2\" (UID: \"47a166f9-373e-4d5e-aaf4-31beb9644df2\") " Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.137282 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq" (OuterVolumeSpecName: "kube-api-access-cdvlq") pod "47a166f9-373e-4d5e-aaf4-31beb9644df2" (UID: "47a166f9-373e-4d5e-aaf4-31beb9644df2"). InnerVolumeSpecName "kube-api-access-cdvlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.185051 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "47a166f9-373e-4d5e-aaf4-31beb9644df2" (UID: "47a166f9-373e-4d5e-aaf4-31beb9644df2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.201101 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config" (OuterVolumeSpecName: "config") pod "47a166f9-373e-4d5e-aaf4-31beb9644df2" (UID: "47a166f9-373e-4d5e-aaf4-31beb9644df2"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.234022 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.234060 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdvlq\" (UniqueName: \"kubernetes.io/projected/47a166f9-373e-4d5e-aaf4-31beb9644df2-kube-api-access-cdvlq\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.234075 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47a166f9-373e-4d5e-aaf4-31beb9644df2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.328477 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.399695 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.430824 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.493722 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.593720 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.631926 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zc5qp" event={"ID":"07101baf-17d1-4e64-8c8a-4ee57ab33873","Type":"ContainerStarted","Data":"8ad00bd5270ec8a652a5e9d64d226d7db2d360120af0142afbd9d8a17f9177d5"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.631983 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zc5qp" event={"ID":"07101baf-17d1-4e64-8c8a-4ee57ab33873","Type":"ContainerStarted","Data":"7aea97365d2a8840ec45c281a7803519f3690f7c42742a945c16aec509cef919"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.646828 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:43:55 crc kubenswrapper[4612]: E1203 07:43:55.652842 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="dnsmasq-dns" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.652872 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="dnsmasq-dns" Dec 03 07:43:55 crc kubenswrapper[4612]: E1203 07:43:55.652896 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="init" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.652902 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="init" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.653093 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerName="dnsmasq-dns" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.653890 4612 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.656844 4612 generic.go:334] "Generic (PLEG): container finished" podID="af40404d-eb38-4281-ae78-fa546de7d6a2" containerID="cdf57c7576cd5ae043948c208e38f9aef9e28f0f60a2b22a2d0265cba185ef8a" exitCode=0 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.656934 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af40404d-eb38-4281-ae78-fa546de7d6a2","Type":"ContainerDied","Data":"cdf57c7576cd5ae043948c208e38f9aef9e28f0f60a2b22a2d0265cba185ef8a"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.663044 4612 generic.go:334] "Generic (PLEG): container finished" podID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" containerID="ddf91f0b4c1b77e38095a8c8c2186db43345626cece3fb6c58fc64eb2773ad8d" exitCode=0 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.663692 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" event={"ID":"527f26a0-8fba-47b1-99a9-551ae7ffaca8","Type":"ContainerDied","Data":"ddf91f0b4c1b77e38095a8c8c2186db43345626cece3fb6c58fc64eb2773ad8d"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.663722 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" event={"ID":"527f26a0-8fba-47b1-99a9-551ae7ffaca8","Type":"ContainerStarted","Data":"9142faf65fb7829aa6f56d85ebfbe831976149fd5cbd4b710fccb79a40c14ded"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.708591 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sgg7f" event={"ID":"e52b6d90-ed35-4d7c-8f9a-a0030280959f","Type":"ContainerStarted","Data":"06d72f94076d76ccd9837f12d6c505e07c0230a78fa72e4b64b2e18191e5ab85"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.721857 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.722392 4612 generic.go:334] "Generic (PLEG): container finished" podID="10aae93b-2a6b-4a5a-a27e-9c2714777dfb" containerID="ffbeb7e10a7e212d2fe1036e34e1b9c846043c3000c04e39cfc97373c12e6f99" exitCode=0 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.722479 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"10aae93b-2a6b-4a5a-a27e-9c2714777dfb","Type":"ContainerDied","Data":"ffbeb7e10a7e212d2fe1036e34e1b9c846043c3000c04e39cfc97373c12e6f99"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.736424 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-zc5qp" podStartSLOduration=1.736405483 podStartE2EDuration="1.736405483s" podCreationTimestamp="2025-12-03 07:43:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:43:55.685789545 +0000 UTC m=+998.859146945" watchObservedRunningTime="2025-12-03 07:43:55.736405483 +0000 UTC m=+998.909762873" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.745155 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wksx\" (UniqueName: \"kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " 
pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.745291 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.745320 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.745376 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.745415 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.773339 4612 generic.go:334] "Generic (PLEG): container finished" podID="47a166f9-373e-4d5e-aaf4-31beb9644df2" containerID="9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a" exitCode=0 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.773403 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.773468 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" event={"ID":"47a166f9-373e-4d5e-aaf4-31beb9644df2","Type":"ContainerDied","Data":"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.773495 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7wkml" event={"ID":"47a166f9-373e-4d5e-aaf4-31beb9644df2","Type":"ContainerDied","Data":"4dc8c2141f68da6cf5252eb41ebbd044439d9cff60f5cfbb7f8d97cdbf4c710c"} Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.773513 4612 scope.go:117] "RemoveContainer" containerID="9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.774166 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="dnsmasq-dns" containerID="cri-o://dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f" gracePeriod=10 Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.774363 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.847247 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wksx\" (UniqueName: \"kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.847639 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.847683 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.847742 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.847796 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.848570 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.849118 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.850408 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.850508 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.869171 4612 scope.go:117] "RemoveContainer" containerID="7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.869355 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.889037 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wksx\" (UniqueName: \"kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx\") pod \"dnsmasq-dns-b8fbc5445-8xkg7\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.944024 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.952620 4612 scope.go:117] "RemoveContainer" containerID="9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.954222 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7wkml"] Dec 03 07:43:55 crc kubenswrapper[4612]: E1203 07:43:55.961936 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a\": container with ID starting with 9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a not found: ID does not exist" containerID="9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.962000 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a"} err="failed to get container status \"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a\": rpc error: code = NotFound desc = could not find container \"9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a\": container with ID starting with 
9d212c26c0db78b38734005a7da7bf653df067ce5a8d3cbf0f4f931e6344160a not found: ID does not exist" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.962031 4612 scope.go:117] "RemoveContainer" containerID="7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5" Dec 03 07:43:55 crc kubenswrapper[4612]: E1203 07:43:55.971314 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5\": container with ID starting with 7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5 not found: ID does not exist" containerID="7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5" Dec 03 07:43:55 crc kubenswrapper[4612]: I1203 07:43:55.971354 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5"} err="failed to get container status \"7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5\": rpc error: code = NotFound desc = could not find container \"7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5\": container with ID starting with 7b2150f3c1b07624fbf6c986b801d920431e52b52aab23b011d027f5b2051eb5 not found: ID does not exist" Dec 03 07:43:56 crc kubenswrapper[4612]: E1203 07:43:56.094406 4612 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 03 07:43:56 crc kubenswrapper[4612]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/527f26a0-8fba-47b1-99a9-551ae7ffaca8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 07:43:56 crc kubenswrapper[4612]: > podSandboxID="9142faf65fb7829aa6f56d85ebfbe831976149fd5cbd4b710fccb79a40c14ded" Dec 03 07:43:56 crc kubenswrapper[4612]: E1203 07:43:56.094540 4612 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 07:43:56 crc kubenswrapper[4612]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8ch647h5fdh676h5c8h566h96h5d8hdh569h64dh5b5h587h55h5cch58dh658h67h5f6h64fh648h6h59fh65ch7hf9hf6h74hf8hch596h5b8q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c25cb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6bc7876d45-xv5zr_openstack(527f26a0-8fba-47b1-99a9-551ae7ffaca8): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/527f26a0-8fba-47b1-99a9-551ae7ffaca8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 07:43:56 crc kubenswrapper[4612]: > logger="UnhandledError" Dec 03 07:43:56 crc kubenswrapper[4612]: E1203 07:43:56.096016 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/527f26a0-8fba-47b1-99a9-551ae7ffaca8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" podUID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.148854 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.319803 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.320954 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.333356 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.333615 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-z7qwq" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.333737 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.333841 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.383906 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.463846 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464275 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464331 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-scripts\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464358 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-config\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464383 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464464 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.464482 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj6hm\" (UniqueName: \"kubernetes.io/projected/d38a92e9-cf02-4966-9bb1-4ea642490d00-kube-api-access-dj6hm\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566063 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566105 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj6hm\" (UniqueName: \"kubernetes.io/projected/d38a92e9-cf02-4966-9bb1-4ea642490d00-kube-api-access-dj6hm\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566148 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566184 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566229 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-scripts\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566251 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-config\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.566268 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.567048 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.568016 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-scripts\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.568225 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d38a92e9-cf02-4966-9bb1-4ea642490d00-config\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.574690 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.587997 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.600664 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38a92e9-cf02-4966-9bb1-4ea642490d00-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.610826 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj6hm\" (UniqueName: \"kubernetes.io/projected/d38a92e9-cf02-4966-9bb1-4ea642490d00-kube-api-access-dj6hm\") pod \"ovn-northd-0\" (UID: \"d38a92e9-cf02-4966-9bb1-4ea642490d00\") " pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.656784 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.740269 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.741015 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 07:43:56 crc kubenswrapper[4612]: E1203 07:43:56.741362 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="dnsmasq-dns" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.741387 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="dnsmasq-dns" Dec 03 07:43:56 crc kubenswrapper[4612]: E1203 07:43:56.741410 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="init" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.741418 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="init" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.741599 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9567553-5429-419e-9c2d-d7af697d7035" containerName="dnsmasq-dns" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.770446 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.775903 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config\") pod \"b9567553-5429-419e-9c2d-d7af697d7035\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.776364 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc\") pod \"b9567553-5429-419e-9c2d-d7af697d7035\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.777411 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkmpv\" (UniqueName: \"kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv\") pod \"b9567553-5429-419e-9c2d-d7af697d7035\" (UID: \"b9567553-5429-419e-9c2d-d7af697d7035\") " Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.788842 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-cdwj4" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.789401 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.792159 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.795483 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.799238 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv" (OuterVolumeSpecName: "kube-api-access-bkmpv") pod "b9567553-5429-419e-9c2d-d7af697d7035" (UID: "b9567553-5429-419e-9c2d-d7af697d7035"). InnerVolumeSpecName "kube-api-access-bkmpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.816213 4612 generic.go:334] "Generic (PLEG): container finished" podID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerID="4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e" exitCode=0 Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.816803 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sgg7f" event={"ID":"e52b6d90-ed35-4d7c-8f9a-a0030280959f","Type":"ContainerDied","Data":"4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e"} Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.824373 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.826529 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"10aae93b-2a6b-4a5a-a27e-9c2714777dfb","Type":"ContainerStarted","Data":"bffe6465db3f84adbeff40a5ec8e29084320f2b25efb81856bc6bf650a2cefb8"} Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.855232 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b9567553-5429-419e-9c2d-d7af697d7035" (UID: "b9567553-5429-419e-9c2d-d7af697d7035"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.855574 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af40404d-eb38-4281-ae78-fa546de7d6a2","Type":"ContainerStarted","Data":"e70b91e05538f7c5bf65a905748c0184edea885084caabe76be873b7d28ddb4f"} Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.858980 4612 generic.go:334] "Generic (PLEG): container finished" podID="b9567553-5429-419e-9c2d-d7af697d7035" containerID="dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f" exitCode=0 Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.859059 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" event={"ID":"b9567553-5429-419e-9c2d-d7af697d7035","Type":"ContainerDied","Data":"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f"} Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.859107 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" event={"ID":"b9567553-5429-419e-9c2d-d7af697d7035","Type":"ContainerDied","Data":"b25851b1fb5b47595c65034c6ae8f71ecf0f035e272dd015a32705642796034c"} Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.859124 4612 scope.go:117] "RemoveContainer" containerID="dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.861571 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qk5t8" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894219 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894372 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-cache\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894422 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-lock\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894534 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d268\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-kube-api-access-7d268\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894639 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.894649 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkmpv\" (UniqueName: \"kubernetes.io/projected/b9567553-5429-419e-9c2d-d7af697d7035-kube-api-access-bkmpv\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.906682 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config" (OuterVolumeSpecName: "config") pod "b9567553-5429-419e-9c2d-d7af697d7035" (UID: "b9567553-5429-419e-9c2d-d7af697d7035"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.911637 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:43:56 crc kubenswrapper[4612]: W1203 07:43:56.959160 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9dfbe94_ffdc_4b45_9a25_782be58c3683.slice/crio-3209007a0f6ae5054d4ca885df9f5ab734cebf9fda0e2f131ebe448f6e7001f5 WatchSource:0}: Error finding container 3209007a0f6ae5054d4ca885df9f5ab734cebf9fda0e2f131ebe448f6e7001f5: Status 404 returned error can't find the container with id 3209007a0f6ae5054d4ca885df9f5ab734cebf9fda0e2f131ebe448f6e7001f5 Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.960131 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=9.041223496 podStartE2EDuration="45.960112069s" podCreationTimestamp="2025-12-03 07:43:11 +0000 UTC" firstStartedPulling="2025-12-03 07:43:13.997875886 +0000 UTC m=+957.171233286" lastFinishedPulling="2025-12-03 07:43:50.916764459 +0000 UTC m=+994.090121859" observedRunningTime="2025-12-03 07:43:56.959818441 +0000 UTC m=+1000.133175841" watchObservedRunningTime="2025-12-03 07:43:56.960112069 +0000 UTC m=+1000.133469469" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.962769 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.552315427 podStartE2EDuration="46.962757453s" podCreationTimestamp="2025-12-03 07:43:10 +0000 UTC" firstStartedPulling="2025-12-03 07:43:12.506669271 +0000 UTC m=+955.680026671" lastFinishedPulling="2025-12-03 07:43:50.917111297 +0000 UTC m=+994.090468697" observedRunningTime="2025-12-03 07:43:56.923231244 +0000 UTC m=+1000.096588654" watchObservedRunningTime="2025-12-03 07:43:56.962757453 +0000 UTC m=+1000.136114853" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.968573 4612 scope.go:117] "RemoveContainer" containerID="3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.995998 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-lock\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.997705 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.997249 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.998792 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d268\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-kube-api-access-7d268\") pod \"swift-storage-0\" (UID: 
\"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.999097 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-lock\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:56 crc kubenswrapper[4612]: I1203 07:43:56.999392 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.000021 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-cache\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.001653 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9567553-5429-419e-9c2d-d7af697d7035-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.001116 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/199ac340-6fa4-414c-b9b1-80aff6965bc0-cache\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.000605 4612 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.017094 4612 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.017179 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift podName:199ac340-6fa4-414c-b9b1-80aff6965bc0 nodeName:}" failed. No retries permitted until 2025-12-03 07:43:57.517157672 +0000 UTC m=+1000.690515072 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift") pod "swift-storage-0" (UID: "199ac340-6fa4-414c-b9b1-80aff6965bc0") : configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.003726 4612 scope.go:117] "RemoveContainer" containerID="dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.019538 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f\": container with ID starting with dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f not found: ID does not exist" containerID="dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.019609 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f"} err="failed to get container status \"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f\": rpc error: code = NotFound desc = could not find container \"dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f\": container with ID starting with dd6142f14b963bd9691b84aea771a81d9534a9f215548aaebc03bd303d65e07f not found: ID does not exist" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.019638 4612 scope.go:117] "RemoveContainer" containerID="3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.023080 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977\": container with ID starting with 3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977 not found: ID does not exist" containerID="3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.023127 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977"} err="failed to get container status \"3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977\": rpc error: code = NotFound desc = could not find container \"3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977\": container with ID starting with 3baff9760363c207fe5de7827ec8d2e316ce36b54eb1d2ae8a044ccf9cc3c977 not found: ID does not exist" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.034278 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d268\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-kube-api-access-7d268\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.035359 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.091926 4612 log.go:32] "CreateContainer in sandbox from runtime service 
failed" err=< Dec 03 07:43:57 crc kubenswrapper[4612]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e52b6d90-ed35-4d7c-8f9a-a0030280959f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 07:43:57 crc kubenswrapper[4612]: > podSandboxID="06d72f94076d76ccd9837f12d6c505e07c0230a78fa72e4b64b2e18191e5ab85" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.092109 4612 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 07:43:57 crc kubenswrapper[4612]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h99h64ch5dbh6dh555h587h64bh5cfh647h5fdh57ch679h9h597h5f5hbch59bh54fh575h566h667h586h5f5h65ch5bch57h68h65ch58bh694h5cfq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9rlpp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
dnsmasq-dns-8554648995-sgg7f_openstack(e52b6d90-ed35-4d7c-8f9a-a0030280959f): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e52b6d90-ed35-4d7c-8f9a-a0030280959f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 07:43:57 crc kubenswrapper[4612]: > logger="UnhandledError" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.093246 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e52b6d90-ed35-4d7c-8f9a-a0030280959f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-8554648995-sgg7f" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.116565 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47a166f9-373e-4d5e-aaf4-31beb9644df2" path="/var/lib/kubelet/pods/47a166f9-373e-4d5e-aaf4-31beb9644df2/volumes" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.189976 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.201246 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qk5t8"] Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.263131 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.309858 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb\") pod \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.309902 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c25cb\" (UniqueName: \"kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb\") pod \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.309952 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc\") pod \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.310009 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config\") pod \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\" (UID: \"527f26a0-8fba-47b1-99a9-551ae7ffaca8\") " Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.314212 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb" (OuterVolumeSpecName: "kube-api-access-c25cb") pod "527f26a0-8fba-47b1-99a9-551ae7ffaca8" (UID: "527f26a0-8fba-47b1-99a9-551ae7ffaca8"). InnerVolumeSpecName "kube-api-access-c25cb". 
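
The CreateContainerError above fails while bind-mounting a subPath: for each VolumeMount with SubPath set, the kubelet materializes a per-container path under /var/lib/kubelet/pods/<uid>/volume-subpaths/ and the runtime mounts that into the container, so a stale or missing target yields exactly this "No such file or directory". The mounts below are copied from the Container spec dump above (Go types from k8s.io/api/core/v1):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Mirrors the VolumeMounts printed in the spec dump above: each hosts
	// file is a subPath of a ConfigMap volume, bind-mounted from the
	// volume-subpaths directory into the container filesystem.
	mounts := []corev1.VolumeMount{
		{Name: "config", ReadOnly: true, MountPath: "/etc/dnsmasq.d/config.cfg", SubPath: "dns"},
		{Name: "dns-svc", ReadOnly: true, MountPath: "/etc/dnsmasq.d/hosts/dns-svc", SubPath: "dns-svc"},
		{Name: "ovsdbserver-nb", ReadOnly: true, MountPath: "/etc/dnsmasq.d/hosts/ovsdbserver-nb", SubPath: "ovsdbserver-nb"},
		{Name: "ovsdbserver-sb", ReadOnly: true, MountPath: "/etc/dnsmasq.d/hosts/ovsdbserver-sb", SubPath: "ovsdbserver-sb"},
	}
	for _, m := range mounts {
		fmt.Printf("%s -> %s (subPath=%s)\n", m.Name, m.MountPath, m.SubPath)
	}
}
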
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.316755 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.380087 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "527f26a0-8fba-47b1-99a9-551ae7ffaca8" (UID: "527f26a0-8fba-47b1-99a9-551ae7ffaca8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.380382 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-p474m"] Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.380770 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" containerName="init" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.380787 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" containerName="init" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.381174 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" containerName="init" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.381769 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.385241 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.385457 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.386257 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.390871 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-p474m"] Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.401574 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "527f26a0-8fba-47b1-99a9-551ae7ffaca8" (UID: "527f26a0-8fba-47b1-99a9-551ae7ffaca8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.411662 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.411758 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.411918 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns5wg\" (UniqueName: \"kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.411988 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.412546 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.412699 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.412842 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.412995 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.413013 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c25cb\" (UniqueName: \"kubernetes.io/projected/527f26a0-8fba-47b1-99a9-551ae7ffaca8-kube-api-access-c25cb\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.413023 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.419825 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config" (OuterVolumeSpecName: "config") pod "527f26a0-8fba-47b1-99a9-551ae7ffaca8" (UID: "527f26a0-8fba-47b1-99a9-551ae7ffaca8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.514670 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns5wg\" (UniqueName: \"kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.516172 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.517207 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.517245 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.517264 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.517284 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.517769 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.518137 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " 
pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.518235 4612 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.518249 4612 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: E1203 07:43:57.518295 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift podName:199ac340-6fa4-414c-b9b1-80aff6965bc0 nodeName:}" failed. No retries permitted until 2025-12-03 07:43:58.518283169 +0000 UTC m=+1001.691640569 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift") pod "swift-storage-0" (UID: "199ac340-6fa4-414c-b9b1-80aff6965bc0") : configmap "swift-ring-files" not found Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.518777 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.518808 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.519510 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.520515 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/527f26a0-8fba-47b1-99a9-551ae7ffaca8-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.521245 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.521440 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.524536 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 
07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.537628 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns5wg\" (UniqueName: \"kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg\") pod \"swift-ring-rebalance-p474m\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.787447 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.874622 4612 generic.go:334] "Generic (PLEG): container finished" podID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerID="051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c" exitCode=0 Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.874738 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" event={"ID":"a9dfbe94-ffdc-4b45-9a25-782be58c3683","Type":"ContainerDied","Data":"051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c"} Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.874793 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" event={"ID":"a9dfbe94-ffdc-4b45-9a25-782be58c3683","Type":"ContainerStarted","Data":"3209007a0f6ae5054d4ca885df9f5ab734cebf9fda0e2f131ebe448f6e7001f5"} Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.890736 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d38a92e9-cf02-4966-9bb1-4ea642490d00","Type":"ContainerStarted","Data":"b920c94952ebf658807a68340b3089793bf264651c4b411773d27a1a89139d57"} Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.917786 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" event={"ID":"527f26a0-8fba-47b1-99a9-551ae7ffaca8","Type":"ContainerDied","Data":"9142faf65fb7829aa6f56d85ebfbe831976149fd5cbd4b710fccb79a40c14ded"} Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.918539 4612 scope.go:117] "RemoveContainer" containerID="ddf91f0b4c1b77e38095a8c8c2186db43345626cece3fb6c58fc64eb2773ad8d" Dec 03 07:43:57 crc kubenswrapper[4612]: I1203 07:43:57.919018 4612 util.go:48] "No ready sandbox for pod can be found. 
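
The pod_startup_latency_tracker entries earlier in the log split startup time into podStartE2EDuration (running minus creation) and podStartSLOduration (the same interval minus image-pull time). Re-deriving both from the openstack-galera-0 timestamps reported above:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps copied from the openstack-galera-0 latency entry above.
	created := mustParse("2025-12-03 07:43:10 +0000 UTC")
	firstPull := mustParse("2025-12-03 07:43:12.506669271 +0000 UTC")
	lastPull := mustParse("2025-12-03 07:43:50.917111297 +0000 UTC")
	observed := mustParse("2025-12-03 07:43:56.962757453 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration excludes pull time
	fmt.Println(e2e, slo)                // 46.962757453s 8.552315427s
}
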
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xv5zr" Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.054517 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.062890 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xv5zr"] Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.299107 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-p474m"] Dec 03 07:43:58 crc kubenswrapper[4612]: W1203 07:43:58.310562 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aff4c7e_d189_4658_b1a6_388353c8dfa8.slice/crio-fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3 WatchSource:0}: Error finding container fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3: Status 404 returned error can't find the container with id fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3 Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.546615 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:43:58 crc kubenswrapper[4612]: E1203 07:43:58.547406 4612 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 07:43:58 crc kubenswrapper[4612]: E1203 07:43:58.547427 4612 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 07:43:58 crc kubenswrapper[4612]: E1203 07:43:58.548492 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift podName:199ac340-6fa4-414c-b9b1-80aff6965bc0 nodeName:}" failed. No retries permitted until 2025-12-03 07:44:00.547492616 +0000 UTC m=+1003.720850036 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift") pod "swift-storage-0" (UID: "199ac340-6fa4-414c-b9b1-80aff6965bc0") : configmap "swift-ring-files" not found Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.932532 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sgg7f" event={"ID":"e52b6d90-ed35-4d7c-8f9a-a0030280959f","Type":"ContainerStarted","Data":"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f"} Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.932756 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.937453 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p474m" event={"ID":"0aff4c7e-d189-4658-b1a6-388353c8dfa8","Type":"ContainerStarted","Data":"fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3"} Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.946456 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" event={"ID":"a9dfbe94-ffdc-4b45-9a25-782be58c3683","Type":"ContainerStarted","Data":"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3"} Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.950242 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.953917 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-sgg7f" podStartSLOduration=4.9539030539999995 podStartE2EDuration="4.953903054s" podCreationTimestamp="2025-12-03 07:43:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:43:58.950569203 +0000 UTC m=+1002.123926603" watchObservedRunningTime="2025-12-03 07:43:58.953903054 +0000 UTC m=+1002.127260464" Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.955361 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d38a92e9-cf02-4966-9bb1-4ea642490d00","Type":"ContainerStarted","Data":"d5ea16a94ff58e7e3ed5a103bd4182b49c39dfcdbc4cf8fd7c339e42c91e437d"} Dec 03 07:43:58 crc kubenswrapper[4612]: I1203 07:43:58.969986 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" podStartSLOduration=3.969970664 podStartE2EDuration="3.969970664s" podCreationTimestamp="2025-12-03 07:43:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:43:58.968114918 +0000 UTC m=+1002.141472308" watchObservedRunningTime="2025-12-03 07:43:58.969970664 +0000 UTC m=+1002.143328074" Dec 03 07:43:59 crc kubenswrapper[4612]: I1203 07:43:59.102654 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="527f26a0-8fba-47b1-99a9-551ae7ffaca8" path="/var/lib/kubelet/pods/527f26a0-8fba-47b1-99a9-551ae7ffaca8/volumes" Dec 03 07:43:59 crc kubenswrapper[4612]: I1203 07:43:59.103234 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9567553-5429-419e-9c2d-d7af697d7035" path="/var/lib/kubelet/pods/b9567553-5429-419e-9c2d-d7af697d7035/volumes" Dec 03 07:43:59 crc kubenswrapper[4612]: I1203 07:43:59.970258 
4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d38a92e9-cf02-4966-9bb1-4ea642490d00","Type":"ContainerStarted","Data":"f6d522e38dc8123998b5ba785ec1aabf90c92da68ca7c63a5d526e49bce1e1ff"} Dec 03 07:43:59 crc kubenswrapper[4612]: I1203 07:43:59.970853 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 07:44:00 crc kubenswrapper[4612]: E1203 07:44:00.467888 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:44:00 crc kubenswrapper[4612]: E1203 07:44:00.565625 4612 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.144:49106->38.102.83.144:45247: write tcp 38.102.83.144:49106->38.102.83.144:45247: write: broken pipe Dec 03 07:44:00 crc kubenswrapper[4612]: I1203 07:44:00.580903 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:44:00 crc kubenswrapper[4612]: E1203 07:44:00.581201 4612 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 07:44:00 crc kubenswrapper[4612]: E1203 07:44:00.581242 4612 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 07:44:00 crc kubenswrapper[4612]: E1203 07:44:00.581315 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift podName:199ac340-6fa4-414c-b9b1-80aff6965bc0 nodeName:}" failed. No retries permitted until 2025-12-03 07:44:04.581291252 +0000 UTC m=+1007.754648652 (durationBeforeRetry 4s). 
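
Across the four failed SetUp attempts for etc-swift, durationBeforeRetry doubles: 500ms, 1s, 2s, and now 4s. A stdlib sketch of that exponential backoff (the cap value is an assumption for illustration; this is not the kubelet's nestedpendingoperations code):

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the retry delay after each failure, starting at 500ms
// and capping at maxDelay, matching the 500ms -> 1s -> 2s -> 4s progression
// in the mount retries above.
func nextDelay(prev, maxDelay time.Duration) time.Duration {
	if prev == 0 {
		return 500 * time.Millisecond
	}
	if d := prev * 2; d < maxDelay {
		return d
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 5; attempt++ {
		d = nextDelay(d, 2*time.Minute) // cap chosen for illustration
		fmt.Printf("attempt %d: retry after %s\n", attempt, d)
	}
}
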
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift") pod "swift-storage-0" (UID: "199ac340-6fa4-414c-b9b1-80aff6965bc0") : configmap "swift-ring-files" not found Dec 03 07:44:01 crc kubenswrapper[4612]: I1203 07:44:01.736246 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 07:44:01 crc kubenswrapper[4612]: I1203 07:44:01.736347 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 07:44:01 crc kubenswrapper[4612]: I1203 07:44:01.814846 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 07:44:01 crc kubenswrapper[4612]: I1203 07:44:01.848256 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=4.553391136 podStartE2EDuration="5.848237746s" podCreationTimestamp="2025-12-03 07:43:56 +0000 UTC" firstStartedPulling="2025-12-03 07:43:57.364364145 +0000 UTC m=+1000.537721545" lastFinishedPulling="2025-12-03 07:43:58.659210755 +0000 UTC m=+1001.832568155" observedRunningTime="2025-12-03 07:44:00.0016336 +0000 UTC m=+1003.174991020" watchObservedRunningTime="2025-12-03 07:44:01.848237746 +0000 UTC m=+1005.021595146" Dec 03 07:44:02 crc kubenswrapper[4612]: I1203 07:44:02.053981 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.054745 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.054789 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.140064 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.309294 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-tkm2k"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.311479 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.319281 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tkm2k"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.427582 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7356-account-create-update-4kqnh"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.428743 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.434536 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.435450 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6dll\" (UniqueName: \"kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.435488 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.442068 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7356-account-create-update-4kqnh"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.523274 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-c7fgx"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.524301 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.534314 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-c7fgx"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.538041 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.538099 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6dll\" (UniqueName: \"kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.538126 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.538176 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgkpw\" (UniqueName: \"kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.539062 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.590896 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6dll\" (UniqueName: \"kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll\") pod \"keystone-db-create-tkm2k\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.632113 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.637826 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-cbc4-account-create-update-whdk7"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.638868 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.639624 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.639817 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgkpw\" (UniqueName: \"kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.639976 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqj9k\" (UniqueName: \"kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.640218 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.640770 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.644571 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.657532 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/placement-cbc4-account-create-update-whdk7"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.676483 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgkpw\" (UniqueName: \"kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw\") pod \"keystone-7356-account-create-update-4kqnh\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.742127 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.742193 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqj9k\" (UniqueName: \"kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.742255 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.742326 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhl5d\" (UniqueName: \"kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.743205 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.746866 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.762150 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqj9k\" (UniqueName: \"kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k\") pod \"placement-db-create-c7fgx\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.827378 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-47bvp"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.828831 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.835837 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-47bvp"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.844700 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhl5d\" (UniqueName: \"kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.844765 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.844795 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9tmb\" (UniqueName: \"kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.844860 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.845656 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.845808 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.864207 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhl5d\" (UniqueName: \"kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d\") pod \"placement-cbc4-account-create-update-whdk7\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.932296 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-bf84-account-create-update-jw9wr"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.933444 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.935623 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.946614 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.946690 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.946723 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9tmb\" (UniqueName: \"kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.947013 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n87ms\" (UniqueName: \"kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.950747 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bf84-account-create-update-jw9wr"] Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.951279 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.971420 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:03 crc kubenswrapper[4612]: I1203 07:44:03.981207 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9tmb\" (UniqueName: \"kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb\") pod \"glance-db-create-47bvp\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " pod="openstack/glance-db-create-47bvp" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.048422 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n87ms\" (UniqueName: \"kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.048476 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.049181 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.064328 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n87ms\" (UniqueName: \"kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms\") pod \"glance-bf84-account-create-update-jw9wr\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.073033 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.152346 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-47bvp" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.254608 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.659650 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:44:04 crc kubenswrapper[4612]: E1203 07:44:04.659856 4612 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 07:44:04 crc kubenswrapper[4612]: E1203 07:44:04.659881 4612 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 07:44:04 crc kubenswrapper[4612]: E1203 07:44:04.659973 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift podName:199ac340-6fa4-414c-b9b1-80aff6965bc0 nodeName:}" failed. No retries permitted until 2025-12-03 07:44:12.659920812 +0000 UTC m=+1015.833278212 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift") pod "swift-storage-0" (UID: "199ac340-6fa4-414c-b9b1-80aff6965bc0") : configmap "swift-ring-files" not found Dec 03 07:44:04 crc kubenswrapper[4612]: I1203 07:44:04.876660 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.289880 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cbc4-account-create-update-whdk7"] Dec 03 07:44:05 crc kubenswrapper[4612]: W1203 07:44:05.484874 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2201704_0a33_48bd_933a_879d56b8e6e7.slice/crio-715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba WatchSource:0}: Error finding container 715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba: Status 404 returned error can't find the container with id 715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.489360 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-c7fgx"] Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.617588 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bf84-account-create-update-jw9wr"] Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.636960 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-47bvp"] Dec 03 07:44:05 crc kubenswrapper[4612]: W1203 07:44:05.637566 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59e2290a_9814_42f9_acd7_ce36e42dd5e9.slice/crio-f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91 WatchSource:0}: Error finding container f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91: Status 404 returned error can't find the container with id f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91 Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.669957 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-7356-account-create-update-4kqnh"] Dec 03 07:44:05 crc kubenswrapper[4612]: I1203 07:44:05.691726 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tkm2k"] Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.043423 4612 generic.go:334] "Generic (PLEG): container finished" podID="b2201704-0a33-48bd-933a-879d56b8e6e7" containerID="beb97f036dccaae439558c36c712c3cf061a3758093fd654ca39369e4a79e023" exitCode=0 Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.043542 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-c7fgx" event={"ID":"b2201704-0a33-48bd-933a-879d56b8e6e7","Type":"ContainerDied","Data":"beb97f036dccaae439558c36c712c3cf061a3758093fd654ca39369e4a79e023"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.043569 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-c7fgx" event={"ID":"b2201704-0a33-48bd-933a-879d56b8e6e7","Type":"ContainerStarted","Data":"715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.046532 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tkm2k" event={"ID":"45a224b3-8904-49fe-a237-6a8d3b2755eb","Type":"ContainerStarted","Data":"af9bc12e3a8232a0539f02a5defaf8a565b7e209500fdcf68105da6bce9ceba3"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.051590 4612 generic.go:334] "Generic (PLEG): container finished" podID="13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" containerID="ebe163931d8867d7a4f7e1ad88c24b740368a5bcc3edf74ffb77e18c32453f1e" exitCode=0 Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.051639 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbc4-account-create-update-whdk7" event={"ID":"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54","Type":"ContainerDied","Data":"ebe163931d8867d7a4f7e1ad88c24b740368a5bcc3edf74ffb77e18c32453f1e"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.051683 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbc4-account-create-update-whdk7" event={"ID":"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54","Type":"ContainerStarted","Data":"7bb3f3afb1865d532380d920bdaa86d2f054e4037473b8df1f1bbc4bcffbbe81"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.053096 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7356-account-create-update-4kqnh" event={"ID":"59e2290a-9814-42f9-acd7-ce36e42dd5e9","Type":"ContainerStarted","Data":"f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.054474 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bf84-account-create-update-jw9wr" event={"ID":"eaf141e2-8218-4144-b0f2-d4568f152c99","Type":"ContainerStarted","Data":"4f91aeef871684b076cca21ae7678531868f54f433bf52bbc15dbed05ed74c76"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.056687 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-47bvp" event={"ID":"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc","Type":"ContainerStarted","Data":"00f0e957a442aa17bdbc86199bf5cc42697a79e47617bb06fd7eb286cf68f738"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.058582 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p474m" 
event={"ID":"0aff4c7e-d189-4658-b1a6-388353c8dfa8","Type":"ContainerStarted","Data":"114e29d2ba75340c8d2fe3b988b4b03dd008546d1716135ddc7b37b90e1c9294"} Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.078218 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-p474m" podStartSLOduration=2.42283639 podStartE2EDuration="9.078198646s" podCreationTimestamp="2025-12-03 07:43:57 +0000 UTC" firstStartedPulling="2025-12-03 07:43:58.312188708 +0000 UTC m=+1001.485546108" lastFinishedPulling="2025-12-03 07:44:04.967550964 +0000 UTC m=+1008.140908364" observedRunningTime="2025-12-03 07:44:06.074857305 +0000 UTC m=+1009.248214705" watchObservedRunningTime="2025-12-03 07:44:06.078198646 +0000 UTC m=+1009.251556056" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.151165 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.209079 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.209282 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-sgg7f" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="dnsmasq-dns" containerID="cri-o://def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f" gracePeriod=10 Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.753449 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.908765 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb\") pod \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.908820 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config\") pod \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.908908 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb\") pod \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.908962 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc\") pod \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.909159 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rlpp\" (UniqueName: \"kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp\") pod \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\" (UID: \"e52b6d90-ed35-4d7c-8f9a-a0030280959f\") " Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.919309 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp" (OuterVolumeSpecName: "kube-api-access-9rlpp") pod "e52b6d90-ed35-4d7c-8f9a-a0030280959f" (UID: "e52b6d90-ed35-4d7c-8f9a-a0030280959f"). InnerVolumeSpecName "kube-api-access-9rlpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.960420 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e52b6d90-ed35-4d7c-8f9a-a0030280959f" (UID: "e52b6d90-ed35-4d7c-8f9a-a0030280959f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.960637 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e52b6d90-ed35-4d7c-8f9a-a0030280959f" (UID: "e52b6d90-ed35-4d7c-8f9a-a0030280959f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.961480 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config" (OuterVolumeSpecName: "config") pod "e52b6d90-ed35-4d7c-8f9a-a0030280959f" (UID: "e52b6d90-ed35-4d7c-8f9a-a0030280959f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:06 crc kubenswrapper[4612]: I1203 07:44:06.972876 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e52b6d90-ed35-4d7c-8f9a-a0030280959f" (UID: "e52b6d90-ed35-4d7c-8f9a-a0030280959f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.010924 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rlpp\" (UniqueName: \"kubernetes.io/projected/e52b6d90-ed35-4d7c-8f9a-a0030280959f-kube-api-access-9rlpp\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.010982 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.010993 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.011002 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.011013 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e52b6d90-ed35-4d7c-8f9a-a0030280959f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.065206 4612 generic.go:334] "Generic (PLEG): container finished" podID="59e2290a-9814-42f9-acd7-ce36e42dd5e9" containerID="cea1d7f57747e96f9476e2e746819e45105bf76d2b170adef35ac08d2600c1c7" exitCode=0 Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.065283 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7356-account-create-update-4kqnh" event={"ID":"59e2290a-9814-42f9-acd7-ce36e42dd5e9","Type":"ContainerDied","Data":"cea1d7f57747e96f9476e2e746819e45105bf76d2b170adef35ac08d2600c1c7"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.066849 4612 generic.go:334] "Generic (PLEG): container finished" podID="eaf141e2-8218-4144-b0f2-d4568f152c99" containerID="7ced67c5e7adfccc3f27c6e295ffe3f36578e952ff34573fb64ec22c21c2b3ab" exitCode=0 Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.066916 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bf84-account-create-update-jw9wr" event={"ID":"eaf141e2-8218-4144-b0f2-d4568f152c99","Type":"ContainerDied","Data":"7ced67c5e7adfccc3f27c6e295ffe3f36578e952ff34573fb64ec22c21c2b3ab"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.068248 4612 generic.go:334] "Generic (PLEG): container finished" podID="7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" containerID="5f1f68d98042e9a5e9b48250509683a5bd9870bc539c66b3cffa943c8ef1dda6" exitCode=0 Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.068331 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-47bvp" event={"ID":"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc","Type":"ContainerDied","Data":"5f1f68d98042e9a5e9b48250509683a5bd9870bc539c66b3cffa943c8ef1dda6"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.069623 4612 generic.go:334] "Generic (PLEG): container finished" podID="45a224b3-8904-49fe-a237-6a8d3b2755eb" containerID="2e51cd4b6e9f5b4b293be5b61d03d1bbce9f3344ef4fe9aec445cc1654c23a14" exitCode=0 Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.069742 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tkm2k" 
event={"ID":"45a224b3-8904-49fe-a237-6a8d3b2755eb","Type":"ContainerDied","Data":"2e51cd4b6e9f5b4b293be5b61d03d1bbce9f3344ef4fe9aec445cc1654c23a14"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.071079 4612 generic.go:334] "Generic (PLEG): container finished" podID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerID="def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f" exitCode=0 Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.071218 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sgg7f" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.074218 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sgg7f" event={"ID":"e52b6d90-ed35-4d7c-8f9a-a0030280959f","Type":"ContainerDied","Data":"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.074262 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sgg7f" event={"ID":"e52b6d90-ed35-4d7c-8f9a-a0030280959f","Type":"ContainerDied","Data":"06d72f94076d76ccd9837f12d6c505e07c0230a78fa72e4b64b2e18191e5ab85"} Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.074280 4612 scope.go:117] "RemoveContainer" containerID="def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.134550 4612 scope.go:117] "RemoveContainer" containerID="4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.169876 4612 scope.go:117] "RemoveContainer" containerID="def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f" Dec 03 07:44:07 crc kubenswrapper[4612]: E1203 07:44:07.173865 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f\": container with ID starting with def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f not found: ID does not exist" containerID="def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.173918 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f"} err="failed to get container status \"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f\": rpc error: code = NotFound desc = could not find container \"def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f\": container with ID starting with def4251eb041843d35dc329b7e9e3ee1c258f787c2ac6a9a572f741e0e0ad18f not found: ID does not exist" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.173962 4612 scope.go:117] "RemoveContainer" containerID="4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e" Dec 03 07:44:07 crc kubenswrapper[4612]: E1203 07:44:07.174391 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e\": container with ID starting with 4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e not found: ID does not exist" containerID="4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.174424 4612 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e"} err="failed to get container status \"4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e\": rpc error: code = NotFound desc = could not find container \"4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e\": container with ID starting with 4fd7ba07fb18ef9717fa2333ac7a8737ff1bd0d1bae744f2702edd0f8f29bc5e not found: ID does not exist" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.176802 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.187306 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sgg7f"] Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.426692 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.513303 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.522499 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqj9k\" (UniqueName: \"kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k\") pod \"b2201704-0a33-48bd-933a-879d56b8e6e7\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.522549 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts\") pod \"b2201704-0a33-48bd-933a-879d56b8e6e7\" (UID: \"b2201704-0a33-48bd-933a-879d56b8e6e7\") " Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.523173 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b2201704-0a33-48bd-933a-879d56b8e6e7" (UID: "b2201704-0a33-48bd-933a-879d56b8e6e7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.532872 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k" (OuterVolumeSpecName: "kube-api-access-sqj9k") pod "b2201704-0a33-48bd-933a-879d56b8e6e7" (UID: "b2201704-0a33-48bd-933a-879d56b8e6e7"). InnerVolumeSpecName "kube-api-access-sqj9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.626484 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts\") pod \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.627688 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhl5d\" (UniqueName: \"kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d\") pod \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\" (UID: \"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54\") " Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.628457 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" (UID: "13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.629035 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.629054 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqj9k\" (UniqueName: \"kubernetes.io/projected/b2201704-0a33-48bd-933a-879d56b8e6e7-kube-api-access-sqj9k\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.629063 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b2201704-0a33-48bd-933a-879d56b8e6e7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.633115 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d" (OuterVolumeSpecName: "kube-api-access-nhl5d") pod "13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" (UID: "13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54"). InnerVolumeSpecName "kube-api-access-nhl5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:07 crc kubenswrapper[4612]: I1203 07:44:07.731398 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhl5d\" (UniqueName: \"kubernetes.io/projected/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54-kube-api-access-nhl5d\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.083816 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-c7fgx" event={"ID":"b2201704-0a33-48bd-933a-879d56b8e6e7","Type":"ContainerDied","Data":"715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba"} Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.083861 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="715d7aaf894bd82f9368a7d73c8958d223c72bf30746e926d0d9749d8ed982ba" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.083940 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-c7fgx" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.092705 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cbc4-account-create-update-whdk7" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.092759 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbc4-account-create-update-whdk7" event={"ID":"13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54","Type":"ContainerDied","Data":"7bb3f3afb1865d532380d920bdaa86d2f054e4037473b8df1f1bbc4bcffbbe81"} Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.092786 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bb3f3afb1865d532380d920bdaa86d2f054e4037473b8df1f1bbc4bcffbbe81" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.460729 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.548126 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6dll\" (UniqueName: \"kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll\") pod \"45a224b3-8904-49fe-a237-6a8d3b2755eb\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.548188 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts\") pod \"45a224b3-8904-49fe-a237-6a8d3b2755eb\" (UID: \"45a224b3-8904-49fe-a237-6a8d3b2755eb\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.548724 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45a224b3-8904-49fe-a237-6a8d3b2755eb" (UID: "45a224b3-8904-49fe-a237-6a8d3b2755eb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.548918 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45a224b3-8904-49fe-a237-6a8d3b2755eb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.555560 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll" (OuterVolumeSpecName: "kube-api-access-d6dll") pod "45a224b3-8904-49fe-a237-6a8d3b2755eb" (UID: "45a224b3-8904-49fe-a237-6a8d3b2755eb"). InnerVolumeSpecName "kube-api-access-d6dll". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.555646 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.579521 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-47bvp" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.604562 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.658568 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts\") pod \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.658620 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgkpw\" (UniqueName: \"kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw\") pod \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\" (UID: \"59e2290a-9814-42f9-acd7-ce36e42dd5e9\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.659446 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6dll\" (UniqueName: \"kubernetes.io/projected/45a224b3-8904-49fe-a237-6a8d3b2755eb-kube-api-access-d6dll\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.659808 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59e2290a-9814-42f9-acd7-ce36e42dd5e9" (UID: "59e2290a-9814-42f9-acd7-ce36e42dd5e9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.665231 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw" (OuterVolumeSpecName: "kube-api-access-tgkpw") pod "59e2290a-9814-42f9-acd7-ce36e42dd5e9" (UID: "59e2290a-9814-42f9-acd7-ce36e42dd5e9"). InnerVolumeSpecName "kube-api-access-tgkpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.761024 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts\") pod \"eaf141e2-8218-4144-b0f2-d4568f152c99\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.761106 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n87ms\" (UniqueName: \"kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms\") pod \"eaf141e2-8218-4144-b0f2-d4568f152c99\" (UID: \"eaf141e2-8218-4144-b0f2-d4568f152c99\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.761211 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9tmb\" (UniqueName: \"kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb\") pod \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.761247 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts\") pod \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\" (UID: \"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc\") " Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.762055 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgkpw\" (UniqueName: \"kubernetes.io/projected/59e2290a-9814-42f9-acd7-ce36e42dd5e9-kube-api-access-tgkpw\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.762076 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2290a-9814-42f9-acd7-ce36e42dd5e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.762234 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eaf141e2-8218-4144-b0f2-d4568f152c99" (UID: "eaf141e2-8218-4144-b0f2-d4568f152c99"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.762432 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" (UID: "7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.765983 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb" (OuterVolumeSpecName: "kube-api-access-l9tmb") pod "7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" (UID: "7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc"). InnerVolumeSpecName "kube-api-access-l9tmb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.766101 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms" (OuterVolumeSpecName: "kube-api-access-n87ms") pod "eaf141e2-8218-4144-b0f2-d4568f152c99" (UID: "eaf141e2-8218-4144-b0f2-d4568f152c99"). InnerVolumeSpecName "kube-api-access-n87ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.869474 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9tmb\" (UniqueName: \"kubernetes.io/projected/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-kube-api-access-l9tmb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.869503 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.869513 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaf141e2-8218-4144-b0f2-d4568f152c99-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:08 crc kubenswrapper[4612]: I1203 07:44:08.869521 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n87ms\" (UniqueName: \"kubernetes.io/projected/eaf141e2-8218-4144-b0f2-d4568f152c99-kube-api-access-n87ms\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.098506 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" path="/var/lib/kubelet/pods/e52b6d90-ed35-4d7c-8f9a-a0030280959f/volumes" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.101521 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7356-account-create-update-4kqnh" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.101565 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7356-account-create-update-4kqnh" event={"ID":"59e2290a-9814-42f9-acd7-ce36e42dd5e9","Type":"ContainerDied","Data":"f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91"} Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.101606 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7a24b6100779c69f593a473238394b7078d34659ec4a5cd9faa351a273b6a91" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.103199 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bf84-account-create-update-jw9wr" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.103188 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bf84-account-create-update-jw9wr" event={"ID":"eaf141e2-8218-4144-b0f2-d4568f152c99","Type":"ContainerDied","Data":"4f91aeef871684b076cca21ae7678531868f54f433bf52bbc15dbed05ed74c76"} Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.103242 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f91aeef871684b076cca21ae7678531868f54f433bf52bbc15dbed05ed74c76" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.110569 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-47bvp" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.110582 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-47bvp" event={"ID":"7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc","Type":"ContainerDied","Data":"00f0e957a442aa17bdbc86199bf5cc42697a79e47617bb06fd7eb286cf68f738"} Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.110618 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00f0e957a442aa17bdbc86199bf5cc42697a79e47617bb06fd7eb286cf68f738" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.115612 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tkm2k" event={"ID":"45a224b3-8904-49fe-a237-6a8d3b2755eb","Type":"ContainerDied","Data":"af9bc12e3a8232a0539f02a5defaf8a565b7e209500fdcf68105da6bce9ceba3"} Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.115659 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af9bc12e3a8232a0539f02a5defaf8a565b7e209500fdcf68105da6bce9ceba3" Dec 03 07:44:09 crc kubenswrapper[4612]: I1203 07:44:09.115724 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tkm2k" Dec 03 07:44:10 crc kubenswrapper[4612]: E1203 07:44:10.685395 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf87dca8_ceaa_424a_8074_7a63c648b84b.slice/crio-253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:44:11 crc kubenswrapper[4612]: I1203 07:44:11.131937 4612 generic.go:334] "Generic (PLEG): container finished" podID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerID="8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f" exitCode=0 Dec 03 07:44:11 crc kubenswrapper[4612]: I1203 07:44:11.132196 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerDied","Data":"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f"} Dec 03 07:44:11 crc kubenswrapper[4612]: I1203 07:44:11.133702 4612 generic.go:334] "Generic (PLEG): container finished" podID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerID="cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74" exitCode=0 Dec 03 07:44:11 crc kubenswrapper[4612]: I1203 07:44:11.133805 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerDied","Data":"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74"} Dec 03 07:44:11 crc kubenswrapper[4612]: I1203 07:44:11.818556 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.142998 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerStarted","Data":"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c"} Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.143442 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.145352 4612 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerStarted","Data":"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2"} Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.145537 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.163993 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=41.835737634 podStartE2EDuration="1m4.163973216s" podCreationTimestamp="2025-12-03 07:43:08 +0000 UTC" firstStartedPulling="2025-12-03 07:43:10.719286063 +0000 UTC m=+953.892643463" lastFinishedPulling="2025-12-03 07:43:33.047521645 +0000 UTC m=+976.220879045" observedRunningTime="2025-12-03 07:44:12.161303161 +0000 UTC m=+1015.334660571" watchObservedRunningTime="2025-12-03 07:44:12.163973216 +0000 UTC m=+1015.337330626" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.197178 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.241369265 podStartE2EDuration="1m4.197155091s" podCreationTimestamp="2025-12-03 07:43:08 +0000 UTC" firstStartedPulling="2025-12-03 07:43:10.944530438 +0000 UTC m=+954.117887838" lastFinishedPulling="2025-12-03 07:43:37.900316254 +0000 UTC m=+981.073673664" observedRunningTime="2025-12-03 07:44:12.191184486 +0000 UTC m=+1015.364541886" watchObservedRunningTime="2025-12-03 07:44:12.197155091 +0000 UTC m=+1015.370512491" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.730116 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.742046 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/199ac340-6fa4-414c-b9b1-80aff6965bc0-etc-swift\") pod \"swift-storage-0\" (UID: \"199ac340-6fa4-414c-b9b1-80aff6965bc0\") " pod="openstack/swift-storage-0" Dec 03 07:44:12 crc kubenswrapper[4612]: I1203 07:44:12.765108 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 07:44:13 crc kubenswrapper[4612]: I1203 07:44:13.152640 4612 generic.go:334] "Generic (PLEG): container finished" podID="0aff4c7e-d189-4658-b1a6-388353c8dfa8" containerID="114e29d2ba75340c8d2fe3b988b4b03dd008546d1716135ddc7b37b90e1c9294" exitCode=0 Dec 03 07:44:13 crc kubenswrapper[4612]: I1203 07:44:13.153708 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p474m" event={"ID":"0aff4c7e-d189-4658-b1a6-388353c8dfa8","Type":"ContainerDied","Data":"114e29d2ba75340c8d2fe3b988b4b03dd008546d1716135ddc7b37b90e1c9294"} Dec 03 07:44:13 crc kubenswrapper[4612]: I1203 07:44:13.421855 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 07:44:13 crc kubenswrapper[4612]: W1203 07:44:13.432322 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod199ac340_6fa4_414c_b9b1_80aff6965bc0.slice/crio-1777ee4fae7e2d2d073307e1adbd9bc1b2d68eb17d93ee5fa2a98d8acefadca3 WatchSource:0}: Error finding container 1777ee4fae7e2d2d073307e1adbd9bc1b2d68eb17d93ee5fa2a98d8acefadca3: Status 404 returned error can't find the container with id 1777ee4fae7e2d2d073307e1adbd9bc1b2d68eb17d93ee5fa2a98d8acefadca3 Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.148927 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-j7748" podUID="385edacb-e835-42f4-a521-7c321043b989" containerName="ovn-controller" probeResult="failure" output=< Dec 03 07:44:14 crc kubenswrapper[4612]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 07:44:14 crc kubenswrapper[4612]: > Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.183073 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"1777ee4fae7e2d2d073307e1adbd9bc1b2d68eb17d93ee5fa2a98d8acefadca3"} Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.184688 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ng85x" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.271571 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-wzkm4"] Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272152 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45a224b3-8904-49fe-a237-6a8d3b2755eb" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272237 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="45a224b3-8904-49fe-a237-6a8d3b2755eb" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272292 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="dnsmasq-dns" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272339 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="dnsmasq-dns" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272388 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2201704-0a33-48bd-933a-879d56b8e6e7" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272438 4612 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b2201704-0a33-48bd-933a-879d56b8e6e7" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272496 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="init" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272546 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="init" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272600 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf141e2-8218-4144-b0f2-d4568f152c99" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272657 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf141e2-8218-4144-b0f2-d4568f152c99" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272723 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e2290a-9814-42f9-acd7-ce36e42dd5e9" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272773 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e2290a-9814-42f9-acd7-ce36e42dd5e9" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272842 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.272909 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: E1203 07:44:14.272998 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273054 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273246 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2201704-0a33-48bd-933a-879d56b8e6e7" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273311 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e52b6d90-ed35-4d7c-8f9a-a0030280959f" containerName="dnsmasq-dns" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273370 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf141e2-8218-4144-b0f2-d4568f152c99" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273433 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e2290a-9814-42f9-acd7-ce36e42dd5e9" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273487 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="45a224b3-8904-49fe-a237-6a8d3b2755eb" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273540 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" containerName="mariadb-account-create-update" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.273601 4612 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" containerName="mariadb-database-create" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.274198 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.277955 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-d6pcb" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.278189 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.284191 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wzkm4"] Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.357211 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.357267 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhjkb\" (UniqueName: \"kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.357335 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.357398 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.458486 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.458570 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.458677 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.458711 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhjkb\" (UniqueName: \"kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.467067 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.467361 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.467881 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.483485 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhjkb\" (UniqueName: \"kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb\") pod \"glance-db-sync-wzkm4\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.596417 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:14 crc kubenswrapper[4612]: I1203 07:44:14.930796 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.068914 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.069549 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.069884 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070025 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070132 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070264 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns5wg\" (UniqueName: \"kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070466 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift\") pod \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\" (UID: \"0aff4c7e-d189-4658-b1a6-388353c8dfa8\") " Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070564 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.070802 4612 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.071522 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.079876 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg" (OuterVolumeSpecName: "kube-api-access-ns5wg") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "kube-api-access-ns5wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.093931 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.096330 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.125897 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.129486 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts" (OuterVolumeSpecName: "scripts") pod "0aff4c7e-d189-4658-b1a6-388353c8dfa8" (UID: "0aff4c7e-d189-4658-b1a6-388353c8dfa8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.172478 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns5wg\" (UniqueName: \"kubernetes.io/projected/0aff4c7e-d189-4658-b1a6-388353c8dfa8-kube-api-access-ns5wg\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.172625 4612 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0aff4c7e-d189-4658-b1a6-388353c8dfa8-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.172685 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aff4c7e-d189-4658-b1a6-388353c8dfa8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.173039 4612 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.173131 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.173194 4612 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0aff4c7e-d189-4658-b1a6-388353c8dfa8-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.191680 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"b90449660610cd08ddea6f32378c41becad1106aa8e8b7255713f56763562afd"} Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.194490 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p474m" event={"ID":"0aff4c7e-d189-4658-b1a6-388353c8dfa8","Type":"ContainerDied","Data":"fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3"} Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.194521 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb6fbc2aaef0848c5442596ba1d31f42ac9c9cf159369a83c7ad84bea2a20fb3" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.194581 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-p474m" Dec 03 07:44:15 crc kubenswrapper[4612]: I1203 07:44:15.439653 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wzkm4"] Dec 03 07:44:15 crc kubenswrapper[4612]: W1203 07:44:15.440247 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58391f1e_0f92_4c3c_844d_b74b3bfd1af6.slice/crio-e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264 WatchSource:0}: Error finding container e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264: Status 404 returned error can't find the container with id e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264 Dec 03 07:44:16 crc kubenswrapper[4612]: I1203 07:44:16.203265 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wzkm4" event={"ID":"58391f1e-0f92-4c3c-844d-b74b3bfd1af6","Type":"ContainerStarted","Data":"e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264"} Dec 03 07:44:16 crc kubenswrapper[4612]: I1203 07:44:16.206192 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"66aa67e30eb568b58052f233f76aa62378c7259df99193a3c51682037e9e4fc8"} Dec 03 07:44:16 crc kubenswrapper[4612]: I1203 07:44:16.206219 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"8dd925067118384bf96dd06a4b2df6d8cb3202e73ad3b93ed17df3b1c914abc5"} Dec 03 07:44:16 crc kubenswrapper[4612]: I1203 07:44:16.206230 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"d935a779eeb6bad2437a60ef2bb2e6659f9d4c245ff70ddcff7ca294173cdb53"} Dec 03 07:44:18 crc kubenswrapper[4612]: I1203 07:44:18.236640 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"682d300e2be0092fe8d9bec1c55ab79d64de61a34effe4271c87031a375ce25e"} Dec 03 07:44:18 crc kubenswrapper[4612]: I1203 07:44:18.237238 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"260733970bc8c166e29e5060dc3cfb1ac3a1853a89d0eb2e0aa3b7f78112dc96"} Dec 03 07:44:18 crc kubenswrapper[4612]: I1203 07:44:18.237255 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"094e7a12636fa91428225ab9ec10c15e837c527cc5887cc70c60332ad48cafa6"} Dec 03 07:44:18 crc kubenswrapper[4612]: I1203 07:44:18.237266 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"f64964b70361d54d34958f5cf62469614637b2f4f982732b14c74b5c6b19c8ec"} Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.155930 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-j7748" podUID="385edacb-e835-42f4-a521-7c321043b989" containerName="ovn-controller" probeResult="failure" output=< Dec 03 07:44:19 crc kubenswrapper[4612]: ERROR - ovn-controller connection status is 'not connected', 
expecting 'connected' status Dec 03 07:44:19 crc kubenswrapper[4612]: > Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.186656 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ng85x" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.409679 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j7748-config-4ffzd"] Dec 03 07:44:19 crc kubenswrapper[4612]: E1203 07:44:19.410397 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aff4c7e-d189-4658-b1a6-388353c8dfa8" containerName="swift-ring-rebalance" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.410416 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aff4c7e-d189-4658-b1a6-388353c8dfa8" containerName="swift-ring-rebalance" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.410646 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aff4c7e-d189-4658-b1a6-388353c8dfa8" containerName="swift-ring-rebalance" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.411345 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.414251 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.423888 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j7748-config-4ffzd"] Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550283 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550373 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfnfl\" (UniqueName: \"kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550393 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550445 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550598 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" 
(UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.550663 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652026 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652088 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652139 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfnfl\" (UniqueName: \"kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652162 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652181 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652215 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652450 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652493 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn\") pod \"ovn-controller-j7748-config-4ffzd\" 
(UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.652528 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.653106 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.653728 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.672780 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfnfl\" (UniqueName: \"kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl\") pod \"ovn-controller-j7748-config-4ffzd\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:19 crc kubenswrapper[4612]: I1203 07:44:19.730075 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:20 crc kubenswrapper[4612]: I1203 07:44:20.259052 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"0e9f0362407302aa6b84e369b75181d4655c106e46787d8ab624149883f449a1"} Dec 03 07:44:20 crc kubenswrapper[4612]: I1203 07:44:20.259438 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"ee003b192a73971156e434cc8a28523f34dee4831c4d3c6c38a94bb72d4e67a5"} Dec 03 07:44:20 crc kubenswrapper[4612]: I1203 07:44:20.319958 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j7748-config-4ffzd"] Dec 03 07:44:20 crc kubenswrapper[4612]: W1203 07:44:20.331501 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0142de5_2b0a_478b_b52e_4994b412c7c1.slice/crio-a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0 WatchSource:0}: Error finding container a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0: Status 404 returned error can't find the container with id a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0 Dec 03 07:44:21 crc kubenswrapper[4612]: I1203 07:44:21.274873 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"e28e66005e1f184de9049b69676dc977a189614dd30b5f8c4125de6b53536d56"} Dec 03 07:44:21 crc kubenswrapper[4612]: I1203 07:44:21.275569 4612 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"2b1cde434470d3ab135edf4af1b4234aa6d4d502581186376eef6f06979df710"} Dec 03 07:44:21 crc kubenswrapper[4612]: I1203 07:44:21.278923 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748-config-4ffzd" event={"ID":"d0142de5-2b0a-478b-b52e-4994b412c7c1","Type":"ContainerStarted","Data":"99042361a5043bc8cb76409151afbb098a169d6024827d41c6c9f7e3f21d3361"} Dec 03 07:44:21 crc kubenswrapper[4612]: I1203 07:44:21.279008 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748-config-4ffzd" event={"ID":"d0142de5-2b0a-478b-b52e-4994b412c7c1","Type":"ContainerStarted","Data":"a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0"} Dec 03 07:44:21 crc kubenswrapper[4612]: I1203 07:44:21.305132 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-j7748-config-4ffzd" podStartSLOduration=2.305111412 podStartE2EDuration="2.305111412s" podCreationTimestamp="2025-12-03 07:44:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:21.300319706 +0000 UTC m=+1024.473677106" watchObservedRunningTime="2025-12-03 07:44:21.305111412 +0000 UTC m=+1024.478468812" Dec 03 07:44:22 crc kubenswrapper[4612]: I1203 07:44:22.287573 4612 generic.go:334] "Generic (PLEG): container finished" podID="d0142de5-2b0a-478b-b52e-4994b412c7c1" containerID="99042361a5043bc8cb76409151afbb098a169d6024827d41c6c9f7e3f21d3361" exitCode=0 Dec 03 07:44:22 crc kubenswrapper[4612]: I1203 07:44:22.287905 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748-config-4ffzd" event={"ID":"d0142de5-2b0a-478b-b52e-4994b412c7c1","Type":"ContainerDied","Data":"99042361a5043bc8cb76409151afbb098a169d6024827d41c6c9f7e3f21d3361"} Dec 03 07:44:22 crc kubenswrapper[4612]: I1203 07:44:22.296158 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"99aae4bb8a4875e3d5b157fd7904255d93156f2fa772032b8506770c1b80ccfc"} Dec 03 07:44:22 crc kubenswrapper[4612]: I1203 07:44:22.296205 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"bc0dedcbd0f92de6c7f44384639391a0ef9030e3fb4335067dc9edd5b769e171"} Dec 03 07:44:24 crc kubenswrapper[4612]: I1203 07:44:24.167138 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-j7748" Dec 03 07:44:29 crc kubenswrapper[4612]: I1203 07:44:29.855349 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.199028 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-rmxx5"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.200777 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.213978 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rmxx5"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.219804 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dz5w\" (UniqueName: \"kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.219862 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.306318 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.322438 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dz5w\" (UniqueName: \"kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.322514 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.323522 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.344247 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dz5w\" (UniqueName: \"kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w\") pod \"cinder-db-create-rmxx5\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.424513 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-f5c8-account-create-update-qhpqt"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.426237 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.435775 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.461817 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f5c8-account-create-update-qhpqt"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.598572 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.628678 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkzlt\" (UniqueName: \"kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.628817 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.730096 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkzlt\" (UniqueName: \"kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.730513 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.731261 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.769338 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkzlt\" (UniqueName: \"kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt\") pod \"cinder-f5c8-account-create-update-qhpqt\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.816087 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-f2fmf"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.817459 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.832662 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfm9h\" (UniqueName: \"kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.832731 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.839458 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.855884 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-f2fmf"] Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.936642 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfm9h\" (UniqueName: \"kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.936719 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.937901 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:30 crc kubenswrapper[4612]: I1203 07:44:30.993595 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfm9h\" (UniqueName: \"kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h\") pod \"barbican-db-create-f2fmf\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046563 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046636 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046663 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfnfl\" (UniqueName: \"kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046732 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046787 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.046877 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run\") pod \"d0142de5-2b0a-478b-b52e-4994b412c7c1\" (UID: \"d0142de5-2b0a-478b-b52e-4994b412c7c1\") " Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.047932 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.048141 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.049314 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts" (OuterVolumeSpecName: "scripts") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.050356 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.052019 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-4643-account-create-update-rv8kg"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.052221 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run" (OuterVolumeSpecName: "var-run") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: E1203 07:44:31.052447 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0142de5-2b0a-478b-b52e-4994b412c7c1" containerName="ovn-config" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.052463 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0142de5-2b0a-478b-b52e-4994b412c7c1" containerName="ovn-config" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.052662 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0142de5-2b0a-478b-b52e-4994b412c7c1" containerName="ovn-config" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.053202 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.054072 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.054949 4612 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.054988 4612 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.072767 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl" (OuterVolumeSpecName: "kube-api-access-lfnfl") pod "d0142de5-2b0a-478b-b52e-4994b412c7c1" (UID: "d0142de5-2b0a-478b-b52e-4994b412c7c1"). InnerVolumeSpecName "kube-api-access-lfnfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.073287 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.055003 4612 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d0142de5-2b0a-478b-b52e-4994b412c7c1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.073695 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.073711 4612 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d0142de5-2b0a-478b-b52e-4994b412c7c1-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.088385 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-4643-account-create-update-rv8kg"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.149518 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.175237 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zst5\" (UniqueName: \"kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.175334 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.175456 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfnfl\" (UniqueName: \"kubernetes.io/projected/d0142de5-2b0a-478b-b52e-4994b412c7c1-kube-api-access-lfnfl\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.277286 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zst5\" (UniqueName: \"kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.277750 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.278928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.296997 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-sntq8"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.298396 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.312674 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.314772 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.329689 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zst5\" (UniqueName: \"kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5\") pod \"barbican-4643-account-create-update-rv8kg\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.320199 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.320254 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m29rd" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.379803 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pcn6\" (UniqueName: \"kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.380116 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.380203 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.406675 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sntq8"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.433784 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.452012 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qzfmm"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.453021 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.477495 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j7748-config-4ffzd" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.477855 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j7748-config-4ffzd" event={"ID":"d0142de5-2b0a-478b-b52e-4994b412c7c1","Type":"ContainerDied","Data":"a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0"} Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.477913 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a449552ef3a1ae80c9bbc7ff703d494ca90ae015a68832edf4222e22640af0a0" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.483706 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.483792 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.483895 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pcn6\" (UniqueName: \"kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.483960 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bglfw\" (UniqueName: \"kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.484006 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.488489 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.491791 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qzfmm"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.502025 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle\") pod \"keystone-db-sync-sntq8\" 
(UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.552714 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pcn6\" (UniqueName: \"kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6\") pod \"keystone-db-sync-sntq8\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.587016 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.587854 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.595503 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bglfw\" (UniqueName: \"kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.596789 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-558a-account-create-update-rnmtc"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.672470 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bglfw\" (UniqueName: \"kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw\") pod \"neutron-db-create-qzfmm\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.672614 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-558a-account-create-update-rnmtc"] Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.672687 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.685801 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.764055 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.811383 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54282\" (UniqueName: \"kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.811835 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.891845 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.942405 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.942564 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54282\" (UniqueName: \"kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.954643 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:31 crc kubenswrapper[4612]: I1203 07:44:31.994806 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54282\" (UniqueName: \"kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282\") pod \"neutron-558a-account-create-update-rnmtc\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:32 crc kubenswrapper[4612]: W1203 07:44:32.019373 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fb3251d_18b1_42f0_b048_3b7de91538a1.slice/crio-b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879 WatchSource:0}: Error finding container b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879: Status 404 returned error can't find the container with id b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879 Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.033914 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rmxx5"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 
07:44:32.152624 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-j7748-config-4ffzd"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.182114 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-j7748-config-4ffzd"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.209475 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.262774 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f5c8-account-create-update-qhpqt"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.286823 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-f2fmf"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.508923 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-4643-account-create-update-rv8kg"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.522639 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rmxx5" event={"ID":"0fb3251d-18b1-42f0-b048-3b7de91538a1","Type":"ContainerStarted","Data":"b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879"} Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.524290 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f5c8-account-create-update-qhpqt" event={"ID":"eb7ad4dc-78e8-4503-936c-dd76a4c73175","Type":"ContainerStarted","Data":"8faad34a50d312aaa6136c8731c07f7192a155faa9ba8fc67ed4665de184db4b"} Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.532146 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f2fmf" event={"ID":"91c6a1d5-8062-4e6c-b12e-a44ed73f7038","Type":"ContainerStarted","Data":"7170092cef228079a01f39c27ba92505e3a355bf6cb3ec6777bc2905efec55e9"} Dec 03 07:44:32 crc kubenswrapper[4612]: W1203 07:44:32.532508 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70d803ac_6da1_49ea_b048_aa20f0062c44.slice/crio-ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef WatchSource:0}: Error finding container ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef: Status 404 returned error can't find the container with id ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.603720 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"199ac340-6fa4-414c-b9b1-80aff6965bc0","Type":"ContainerStarted","Data":"a8f7be8bc2aaeb0195825aec13c273e3f8bcff972afcca19618b4d9f275d200b"} Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.699016 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sntq8"] Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.699227 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=31.726487554 podStartE2EDuration="37.699197292s" podCreationTimestamp="2025-12-03 07:43:55 +0000 UTC" firstStartedPulling="2025-12-03 07:44:13.436259258 +0000 UTC m=+1016.609616658" lastFinishedPulling="2025-12-03 07:44:19.408968996 +0000 UTC m=+1022.582326396" observedRunningTime="2025-12-03 07:44:32.665894864 +0000 UTC m=+1035.839252294" watchObservedRunningTime="2025-12-03 07:44:32.699197292 +0000 UTC 
m=+1035.872554692" Dec 03 07:44:32 crc kubenswrapper[4612]: I1203 07:44:32.980520 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qzfmm"] Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.102521 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0142de5-2b0a-478b-b52e-4994b412c7c1" path="/var/lib/kubelet/pods/d0142de5-2b0a-478b-b52e-4994b412c7c1/volumes" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.139851 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-558a-account-create-update-rnmtc"] Dec 03 07:44:33 crc kubenswrapper[4612]: W1203 07:44:33.155761 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea00badd_5625_42e1_a6af_9fad9903385a.slice/crio-4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991 WatchSource:0}: Error finding container 4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991: Status 404 returned error can't find the container with id 4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991 Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.256627 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.258051 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.264622 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.291478 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.382425 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.382485 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.382803 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hxcn\" (UniqueName: \"kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.382870 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.383003 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.383064 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.484430 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.484804 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.484841 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.484866 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.484904 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.485126 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hxcn\" (UniqueName: \"kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.486020 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.486090 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.486144 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.486179 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.486559 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.540155 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hxcn\" (UniqueName: \"kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn\") pod \"dnsmasq-dns-6d5b6d6b67-4tdwk\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.580443 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.625005 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-558a-account-create-update-rnmtc" event={"ID":"ea00badd-5625-42e1-a6af-9fad9903385a","Type":"ContainerStarted","Data":"5775852b47c2d79141158b14850e6be49fed7e207e5ef255aab7d76ef010ea89"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.625056 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-558a-account-create-update-rnmtc" event={"ID":"ea00badd-5625-42e1-a6af-9fad9903385a","Type":"ContainerStarted","Data":"4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.629232 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qzfmm" event={"ID":"768893ad-00ba-4b59-96ba-b7a078879dbe","Type":"ContainerStarted","Data":"d39568f0f8b07f8b92cf961ea4bd9e26e8dd80551f62690944600c4ce063ec94"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.629268 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qzfmm" event={"ID":"768893ad-00ba-4b59-96ba-b7a078879dbe","Type":"ContainerStarted","Data":"1abd239cbbbf9f20af0bf4aa53c4f129429549e1480b0fbfaa84b88d1f676a5c"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.631229 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sntq8" event={"ID":"c8d788a9-ac6b-4242-9ffc-26578617f984","Type":"ContainerStarted","Data":"fc46c854967f64b457e2f61f6c5f9d44c1c407043eb1ff72c8bcf1918b8aa35b"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.636044 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4643-account-create-update-rv8kg" event={"ID":"70d803ac-6da1-49ea-b048-aa20f0062c44","Type":"ContainerStarted","Data":"700f22cf690f526e04937c5429bb99b695e01abc93bbb544b68976370bd88a20"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.636088 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4643-account-create-update-rv8kg" event={"ID":"70d803ac-6da1-49ea-b048-aa20f0062c44","Type":"ContainerStarted","Data":"ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.649059 4612 generic.go:334] "Generic (PLEG): container finished" podID="0fb3251d-18b1-42f0-b048-3b7de91538a1" containerID="68a8c4a9d0cfbee2a5dc444e027d5c5545b26a60bbabf8cdb10022a177dd3a32" exitCode=0 Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.649121 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rmxx5" event={"ID":"0fb3251d-18b1-42f0-b048-3b7de91538a1","Type":"ContainerDied","Data":"68a8c4a9d0cfbee2a5dc444e027d5c5545b26a60bbabf8cdb10022a177dd3a32"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.650458 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wzkm4" event={"ID":"58391f1e-0f92-4c3c-844d-b74b3bfd1af6","Type":"ContainerStarted","Data":"319d2dac01f620e4579713fbb6e23f6a4dbf61b21db0822d9fd053bf9ec59fdf"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.652444 4612 generic.go:334] "Generic (PLEG): container finished" podID="eb7ad4dc-78e8-4503-936c-dd76a4c73175" containerID="b1c5bdb2d90adab8e54f15f74c762dc784eee615e00aa65c02d5d100d3cdf8e1" exitCode=0 Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.652480 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-f5c8-account-create-update-qhpqt" event={"ID":"eb7ad4dc-78e8-4503-936c-dd76a4c73175","Type":"ContainerDied","Data":"b1c5bdb2d90adab8e54f15f74c762dc784eee615e00aa65c02d5d100d3cdf8e1"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.655634 4612 generic.go:334] "Generic (PLEG): container finished" podID="91c6a1d5-8062-4e6c-b12e-a44ed73f7038" containerID="40611c7331b96df8812e898efe6b7c7c05259bb908a16c2a6cb38d29c7d6956d" exitCode=0 Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.656718 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f2fmf" event={"ID":"91c6a1d5-8062-4e6c-b12e-a44ed73f7038","Type":"ContainerDied","Data":"40611c7331b96df8812e898efe6b7c7c05259bb908a16c2a6cb38d29c7d6956d"} Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.674228 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-558a-account-create-update-rnmtc" podStartSLOduration=2.674208244 podStartE2EDuration="2.674208244s" podCreationTimestamp="2025-12-03 07:44:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:33.664328134 +0000 UTC m=+1036.837685534" watchObservedRunningTime="2025-12-03 07:44:33.674208244 +0000 UTC m=+1036.847565644" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.744094 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-wzkm4" podStartSLOduration=4.251295534 podStartE2EDuration="19.744076889s" podCreationTimestamp="2025-12-03 07:44:14 +0000 UTC" firstStartedPulling="2025-12-03 07:44:15.442442665 +0000 UTC m=+1018.615800065" lastFinishedPulling="2025-12-03 07:44:30.93522402 +0000 UTC m=+1034.108581420" observedRunningTime="2025-12-03 07:44:33.706822045 +0000 UTC m=+1036.880179445" watchObservedRunningTime="2025-12-03 07:44:33.744076889 +0000 UTC m=+1036.917434289" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.824050 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-qzfmm" podStartSLOduration=2.824026598 podStartE2EDuration="2.824026598s" podCreationTimestamp="2025-12-03 07:44:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:33.81257069 +0000 UTC m=+1036.985928090" watchObservedRunningTime="2025-12-03 07:44:33.824026598 +0000 UTC m=+1036.997383998" Dec 03 07:44:33 crc kubenswrapper[4612]: I1203 07:44:33.847763 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-4643-account-create-update-rv8kg" podStartSLOduration=2.847744344 podStartE2EDuration="2.847744344s" podCreationTimestamp="2025-12-03 07:44:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:33.834903512 +0000 UTC m=+1037.008260912" watchObservedRunningTime="2025-12-03 07:44:33.847744344 +0000 UTC m=+1037.021101744" Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.327792 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.668106 4612 generic.go:334] "Generic (PLEG): container finished" podID="ea00badd-5625-42e1-a6af-9fad9903385a" containerID="5775852b47c2d79141158b14850e6be49fed7e207e5ef255aab7d76ef010ea89" exitCode=0 Dec 03 07:44:34 
crc kubenswrapper[4612]: I1203 07:44:34.668306 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-558a-account-create-update-rnmtc" event={"ID":"ea00badd-5625-42e1-a6af-9fad9903385a","Type":"ContainerDied","Data":"5775852b47c2d79141158b14850e6be49fed7e207e5ef255aab7d76ef010ea89"} Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.671719 4612 generic.go:334] "Generic (PLEG): container finished" podID="343a43d3-b0e6-44f3-8089-747916e7932c" containerID="a2eddebaa94aa7d16452dc6c2f7ec069a3a188fe1e57044b4da920ad09eee655" exitCode=0 Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.671777 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" event={"ID":"343a43d3-b0e6-44f3-8089-747916e7932c","Type":"ContainerDied","Data":"a2eddebaa94aa7d16452dc6c2f7ec069a3a188fe1e57044b4da920ad09eee655"} Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.671831 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" event={"ID":"343a43d3-b0e6-44f3-8089-747916e7932c","Type":"ContainerStarted","Data":"4b51aa3fc03fb1aa3c0ab58ce4a5e2d05cec2b008109819e04141017b0678190"} Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.675170 4612 generic.go:334] "Generic (PLEG): container finished" podID="768893ad-00ba-4b59-96ba-b7a078879dbe" containerID="d39568f0f8b07f8b92cf961ea4bd9e26e8dd80551f62690944600c4ce063ec94" exitCode=0 Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.675228 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qzfmm" event={"ID":"768893ad-00ba-4b59-96ba-b7a078879dbe","Type":"ContainerDied","Data":"d39568f0f8b07f8b92cf961ea4bd9e26e8dd80551f62690944600c4ce063ec94"} Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.678291 4612 generic.go:334] "Generic (PLEG): container finished" podID="70d803ac-6da1-49ea-b048-aa20f0062c44" containerID="700f22cf690f526e04937c5429bb99b695e01abc93bbb544b68976370bd88a20" exitCode=0 Dec 03 07:44:34 crc kubenswrapper[4612]: I1203 07:44:34.679378 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4643-account-create-update-rv8kg" event={"ID":"70d803ac-6da1-49ea-b048-aa20f0062c44","Type":"ContainerDied","Data":"700f22cf690f526e04937c5429bb99b695e01abc93bbb544b68976370bd88a20"} Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.299035 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.336493 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts\") pod \"0fb3251d-18b1-42f0-b048-3b7de91538a1\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.336588 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dz5w\" (UniqueName: \"kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w\") pod \"0fb3251d-18b1-42f0-b048-3b7de91538a1\" (UID: \"0fb3251d-18b1-42f0-b048-3b7de91538a1\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.342539 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w" (OuterVolumeSpecName: "kube-api-access-7dz5w") pod "0fb3251d-18b1-42f0-b048-3b7de91538a1" (UID: "0fb3251d-18b1-42f0-b048-3b7de91538a1"). InnerVolumeSpecName "kube-api-access-7dz5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.342997 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fb3251d-18b1-42f0-b048-3b7de91538a1" (UID: "0fb3251d-18b1-42f0-b048-3b7de91538a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.444036 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fb3251d-18b1-42f0-b048-3b7de91538a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.444064 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dz5w\" (UniqueName: \"kubernetes.io/projected/0fb3251d-18b1-42f0-b048-3b7de91538a1-kube-api-access-7dz5w\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.529221 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.546507 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.647905 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkzlt\" (UniqueName: \"kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt\") pod \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.648163 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfm9h\" (UniqueName: \"kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h\") pod \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.648193 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts\") pod \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\" (UID: \"91c6a1d5-8062-4e6c-b12e-a44ed73f7038\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.648714 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts\") pod \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\" (UID: \"eb7ad4dc-78e8-4503-936c-dd76a4c73175\") " Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.648711 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91c6a1d5-8062-4e6c-b12e-a44ed73f7038" (UID: "91c6a1d5-8062-4e6c-b12e-a44ed73f7038"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.649064 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.649086 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb7ad4dc-78e8-4503-936c-dd76a4c73175" (UID: "eb7ad4dc-78e8-4503-936c-dd76a4c73175"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.661138 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt" (OuterVolumeSpecName: "kube-api-access-nkzlt") pod "eb7ad4dc-78e8-4503-936c-dd76a4c73175" (UID: "eb7ad4dc-78e8-4503-936c-dd76a4c73175"). InnerVolumeSpecName "kube-api-access-nkzlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.672037 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h" (OuterVolumeSpecName: "kube-api-access-bfm9h") pod "91c6a1d5-8062-4e6c-b12e-a44ed73f7038" (UID: "91c6a1d5-8062-4e6c-b12e-a44ed73f7038"). 
InnerVolumeSpecName "kube-api-access-bfm9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.695460 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f5c8-account-create-update-qhpqt" event={"ID":"eb7ad4dc-78e8-4503-936c-dd76a4c73175","Type":"ContainerDied","Data":"8faad34a50d312aaa6136c8731c07f7192a155faa9ba8fc67ed4665de184db4b"} Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.695754 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8faad34a50d312aaa6136c8731c07f7192a155faa9ba8fc67ed4665de184db4b" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.695844 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f5c8-account-create-update-qhpqt" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.698602 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f2fmf" event={"ID":"91c6a1d5-8062-4e6c-b12e-a44ed73f7038","Type":"ContainerDied","Data":"7170092cef228079a01f39c27ba92505e3a355bf6cb3ec6777bc2905efec55e9"} Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.698640 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7170092cef228079a01f39c27ba92505e3a355bf6cb3ec6777bc2905efec55e9" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.698617 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-f2fmf" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.701254 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" event={"ID":"343a43d3-b0e6-44f3-8089-747916e7932c","Type":"ContainerStarted","Data":"3a929e2e4c5fb428efd2e3e91f4b24090d3f569a3644c271fca0b25444b1b101"} Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.702196 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.708196 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rmxx5" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.708655 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rmxx5" event={"ID":"0fb3251d-18b1-42f0-b048-3b7de91538a1","Type":"ContainerDied","Data":"b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879"} Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.708683 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1c8ded377fc7c203777be8248e88e9adaee32204189730fc0ff977ffa518879" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.735266 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podStartSLOduration=2.7352488 podStartE2EDuration="2.7352488s" podCreationTimestamp="2025-12-03 07:44:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:35.723545006 +0000 UTC m=+1038.896902416" watchObservedRunningTime="2025-12-03 07:44:35.7352488 +0000 UTC m=+1038.908606200" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.751082 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkzlt\" (UniqueName: \"kubernetes.io/projected/eb7ad4dc-78e8-4503-936c-dd76a4c73175-kube-api-access-nkzlt\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.751111 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfm9h\" (UniqueName: \"kubernetes.io/projected/91c6a1d5-8062-4e6c-b12e-a44ed73f7038-kube-api-access-bfm9h\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:35 crc kubenswrapper[4612]: I1203 07:44:35.751123 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7ad4dc-78e8-4503-936c-dd76a4c73175-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.041599 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.071480 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.083545 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161388 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts\") pod \"ea00badd-5625-42e1-a6af-9fad9903385a\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161637 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zst5\" (UniqueName: \"kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5\") pod \"70d803ac-6da1-49ea-b048-aa20f0062c44\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161723 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54282\" (UniqueName: \"kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282\") pod \"ea00badd-5625-42e1-a6af-9fad9903385a\" (UID: \"ea00badd-5625-42e1-a6af-9fad9903385a\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161812 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts\") pod \"768893ad-00ba-4b59-96ba-b7a078879dbe\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161891 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bglfw\" (UniqueName: \"kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw\") pod \"768893ad-00ba-4b59-96ba-b7a078879dbe\" (UID: \"768893ad-00ba-4b59-96ba-b7a078879dbe\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161919 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts\") pod \"70d803ac-6da1-49ea-b048-aa20f0062c44\" (UID: \"70d803ac-6da1-49ea-b048-aa20f0062c44\") " Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.161987 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea00badd-5625-42e1-a6af-9fad9903385a" (UID: "ea00badd-5625-42e1-a6af-9fad9903385a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.162455 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea00badd-5625-42e1-a6af-9fad9903385a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.162860 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "768893ad-00ba-4b59-96ba-b7a078879dbe" (UID: "768893ad-00ba-4b59-96ba-b7a078879dbe"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.165652 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70d803ac-6da1-49ea-b048-aa20f0062c44" (UID: "70d803ac-6da1-49ea-b048-aa20f0062c44"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.166600 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282" (OuterVolumeSpecName: "kube-api-access-54282") pod "ea00badd-5625-42e1-a6af-9fad9903385a" (UID: "ea00badd-5625-42e1-a6af-9fad9903385a"). InnerVolumeSpecName "kube-api-access-54282". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.168442 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5" (OuterVolumeSpecName: "kube-api-access-9zst5") pod "70d803ac-6da1-49ea-b048-aa20f0062c44" (UID: "70d803ac-6da1-49ea-b048-aa20f0062c44"). InnerVolumeSpecName "kube-api-access-9zst5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.169158 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw" (OuterVolumeSpecName: "kube-api-access-bglfw") pod "768893ad-00ba-4b59-96ba-b7a078879dbe" (UID: "768893ad-00ba-4b59-96ba-b7a078879dbe"). InnerVolumeSpecName "kube-api-access-bglfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.264823 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zst5\" (UniqueName: \"kubernetes.io/projected/70d803ac-6da1-49ea-b048-aa20f0062c44-kube-api-access-9zst5\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.264867 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54282\" (UniqueName: \"kubernetes.io/projected/ea00badd-5625-42e1-a6af-9fad9903385a-kube-api-access-54282\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.264884 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768893ad-00ba-4b59-96ba-b7a078879dbe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.264898 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bglfw\" (UniqueName: \"kubernetes.io/projected/768893ad-00ba-4b59-96ba-b7a078879dbe-kube-api-access-bglfw\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.264911 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70d803ac-6da1-49ea-b048-aa20f0062c44-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.717727 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qzfmm" event={"ID":"768893ad-00ba-4b59-96ba-b7a078879dbe","Type":"ContainerDied","Data":"1abd239cbbbf9f20af0bf4aa53c4f129429549e1480b0fbfaa84b88d1f676a5c"} Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.718194 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1abd239cbbbf9f20af0bf4aa53c4f129429549e1480b0fbfaa84b88d1f676a5c" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.717767 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qzfmm" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.719030 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-4643-account-create-update-rv8kg" event={"ID":"70d803ac-6da1-49ea-b048-aa20f0062c44","Type":"ContainerDied","Data":"ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef"} Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.719068 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba84465e34b502075c5663db1087921dc78e0891e04b1c4804a86fbdef2b1cef" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.719076 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-4643-account-create-update-rv8kg" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.721670 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-558a-account-create-update-rnmtc" Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.723068 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-558a-account-create-update-rnmtc" event={"ID":"ea00badd-5625-42e1-a6af-9fad9903385a","Type":"ContainerDied","Data":"4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991"} Dec 03 07:44:36 crc kubenswrapper[4612]: I1203 07:44:36.723096 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fbbc5c3e619826a8aa51a67abce935887714bcc1418930285d17483fe9fc991" Dec 03 07:44:41 crc kubenswrapper[4612]: I1203 07:44:41.768250 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sntq8" event={"ID":"c8d788a9-ac6b-4242-9ffc-26578617f984","Type":"ContainerStarted","Data":"11067c81d6a6cb14cc8a9fc088579b182ea9765213525108e86817b3c865b59d"} Dec 03 07:44:41 crc kubenswrapper[4612]: I1203 07:44:41.793573 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-sntq8" podStartSLOduration=2.934712601 podStartE2EDuration="10.793554996s" podCreationTimestamp="2025-12-03 07:44:31 +0000 UTC" firstStartedPulling="2025-12-03 07:44:32.758031389 +0000 UTC m=+1035.931388789" lastFinishedPulling="2025-12-03 07:44:40.616873784 +0000 UTC m=+1043.790231184" observedRunningTime="2025-12-03 07:44:41.782533361 +0000 UTC m=+1044.955890791" watchObservedRunningTime="2025-12-03 07:44:41.793554996 +0000 UTC m=+1044.966912396" Dec 03 07:44:43 crc kubenswrapper[4612]: I1203 07:44:43.582318 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:44:43 crc kubenswrapper[4612]: I1203 07:44:43.702691 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:44:43 crc kubenswrapper[4612]: I1203 07:44:43.702933 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="dnsmasq-dns" containerID="cri-o://b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3" gracePeriod=10 Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.366124 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.446179 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config\") pod \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.446378 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb\") pod \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.446404 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wksx\" (UniqueName: \"kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx\") pod \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.446469 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb\") pod \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.446535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc\") pod \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\" (UID: \"a9dfbe94-ffdc-4b45-9a25-782be58c3683\") " Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.459867 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx" (OuterVolumeSpecName: "kube-api-access-5wksx") pod "a9dfbe94-ffdc-4b45-9a25-782be58c3683" (UID: "a9dfbe94-ffdc-4b45-9a25-782be58c3683"). InnerVolumeSpecName "kube-api-access-5wksx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.506065 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config" (OuterVolumeSpecName: "config") pod "a9dfbe94-ffdc-4b45-9a25-782be58c3683" (UID: "a9dfbe94-ffdc-4b45-9a25-782be58c3683"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.512493 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a9dfbe94-ffdc-4b45-9a25-782be58c3683" (UID: "a9dfbe94-ffdc-4b45-9a25-782be58c3683"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.512531 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a9dfbe94-ffdc-4b45-9a25-782be58c3683" (UID: "a9dfbe94-ffdc-4b45-9a25-782be58c3683"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.517381 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a9dfbe94-ffdc-4b45-9a25-782be58c3683" (UID: "a9dfbe94-ffdc-4b45-9a25-782be58c3683"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.548688 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.548728 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wksx\" (UniqueName: \"kubernetes.io/projected/a9dfbe94-ffdc-4b45-9a25-782be58c3683-kube-api-access-5wksx\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.548745 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.548758 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.548769 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9dfbe94-ffdc-4b45-9a25-782be58c3683-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.790652 4612 generic.go:334] "Generic (PLEG): container finished" podID="58391f1e-0f92-4c3c-844d-b74b3bfd1af6" containerID="319d2dac01f620e4579713fbb6e23f6a4dbf61b21db0822d9fd053bf9ec59fdf" exitCode=0 Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.790727 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wzkm4" event={"ID":"58391f1e-0f92-4c3c-844d-b74b3bfd1af6","Type":"ContainerDied","Data":"319d2dac01f620e4579713fbb6e23f6a4dbf61b21db0822d9fd053bf9ec59fdf"} Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.793269 4612 generic.go:334] "Generic (PLEG): container finished" podID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerID="b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3" exitCode=0 Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.793334 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" event={"ID":"a9dfbe94-ffdc-4b45-9a25-782be58c3683","Type":"ContainerDied","Data":"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3"} Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.793365 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" event={"ID":"a9dfbe94-ffdc-4b45-9a25-782be58c3683","Type":"ContainerDied","Data":"3209007a0f6ae5054d4ca885df9f5ab734cebf9fda0e2f131ebe448f6e7001f5"} Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.793388 4612 scope.go:117] "RemoveContainer" containerID="b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.793392 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-8xkg7" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.795192 4612 generic.go:334] "Generic (PLEG): container finished" podID="c8d788a9-ac6b-4242-9ffc-26578617f984" containerID="11067c81d6a6cb14cc8a9fc088579b182ea9765213525108e86817b3c865b59d" exitCode=0 Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.795238 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sntq8" event={"ID":"c8d788a9-ac6b-4242-9ffc-26578617f984","Type":"ContainerDied","Data":"11067c81d6a6cb14cc8a9fc088579b182ea9765213525108e86817b3c865b59d"} Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.813144 4612 scope.go:117] "RemoveContainer" containerID="051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.844969 4612 scope.go:117] "RemoveContainer" containerID="b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3" Dec 03 07:44:44 crc kubenswrapper[4612]: E1203 07:44:44.845309 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3\": container with ID starting with b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3 not found: ID does not exist" containerID="b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.845412 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3"} err="failed to get container status \"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3\": rpc error: code = NotFound desc = could not find container \"b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3\": container with ID starting with b92ab2ab027867594ed256c3c3b3b44d6518a0b46b69de37d592bd73bbd3e2b3 not found: ID does not exist" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.845498 4612 scope.go:117] "RemoveContainer" containerID="051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c" Dec 03 07:44:44 crc kubenswrapper[4612]: E1203 07:44:44.846299 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c\": container with ID starting with 051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c not found: ID does not exist" containerID="051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.846323 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c"} err="failed to get container status \"051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c\": rpc error: code = NotFound desc = could not find container \"051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c\": container with ID starting with 051caf474e8cd78dde6f4d638bfebb677803865d73494495603a8bfe760fbd0c not found: ID does not exist" Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.858965 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:44:44 crc kubenswrapper[4612]: I1203 07:44:44.865215 4612 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-8xkg7"] Dec 03 07:44:45 crc kubenswrapper[4612]: I1203 07:44:45.100816 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" path="/var/lib/kubelet/pods/a9dfbe94-ffdc-4b45-9a25-782be58c3683/volumes" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.122363 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.179125 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle\") pod \"c8d788a9-ac6b-4242-9ffc-26578617f984\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.179174 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data\") pod \"c8d788a9-ac6b-4242-9ffc-26578617f984\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.179326 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pcn6\" (UniqueName: \"kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6\") pod \"c8d788a9-ac6b-4242-9ffc-26578617f984\" (UID: \"c8d788a9-ac6b-4242-9ffc-26578617f984\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.187020 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6" (OuterVolumeSpecName: "kube-api-access-5pcn6") pod "c8d788a9-ac6b-4242-9ffc-26578617f984" (UID: "c8d788a9-ac6b-4242-9ffc-26578617f984"). InnerVolumeSpecName "kube-api-access-5pcn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.208087 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8d788a9-ac6b-4242-9ffc-26578617f984" (UID: "c8d788a9-ac6b-4242-9ffc-26578617f984"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.224817 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data" (OuterVolumeSpecName: "config-data") pod "c8d788a9-ac6b-4242-9ffc-26578617f984" (UID: "c8d788a9-ac6b-4242-9ffc-26578617f984"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.275271 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.281312 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.281336 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8d788a9-ac6b-4242-9ffc-26578617f984-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.281346 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pcn6\" (UniqueName: \"kubernetes.io/projected/c8d788a9-ac6b-4242-9ffc-26578617f984-kube-api-access-5pcn6\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.382144 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data\") pod \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.382240 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data\") pod \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.382321 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle\") pod \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.382415 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhjkb\" (UniqueName: \"kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb\") pod \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\" (UID: \"58391f1e-0f92-4c3c-844d-b74b3bfd1af6\") " Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.385581 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb" (OuterVolumeSpecName: "kube-api-access-rhjkb") pod "58391f1e-0f92-4c3c-844d-b74b3bfd1af6" (UID: "58391f1e-0f92-4c3c-844d-b74b3bfd1af6"). InnerVolumeSpecName "kube-api-access-rhjkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.385976 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "58391f1e-0f92-4c3c-844d-b74b3bfd1af6" (UID: "58391f1e-0f92-4c3c-844d-b74b3bfd1af6"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.406463 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58391f1e-0f92-4c3c-844d-b74b3bfd1af6" (UID: "58391f1e-0f92-4c3c-844d-b74b3bfd1af6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.427914 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data" (OuterVolumeSpecName: "config-data") pod "58391f1e-0f92-4c3c-844d-b74b3bfd1af6" (UID: "58391f1e-0f92-4c3c-844d-b74b3bfd1af6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.484445 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhjkb\" (UniqueName: \"kubernetes.io/projected/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-kube-api-access-rhjkb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.484476 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.484489 4612 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.484497 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58391f1e-0f92-4c3c-844d-b74b3bfd1af6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.816524 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sntq8" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.816525 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sntq8" event={"ID":"c8d788a9-ac6b-4242-9ffc-26578617f984","Type":"ContainerDied","Data":"fc46c854967f64b457e2f61f6c5f9d44c1c407043eb1ff72c8bcf1918b8aa35b"} Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.816728 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc46c854967f64b457e2f61f6c5f9d44c1c407043eb1ff72c8bcf1918b8aa35b" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.817864 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wzkm4" event={"ID":"58391f1e-0f92-4c3c-844d-b74b3bfd1af6","Type":"ContainerDied","Data":"e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264"} Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.817900 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e17310d05f8f0855c16b1c0d2b36a03ebd136487e8cf4280af0f4b7b52f38264" Dec 03 07:44:46 crc kubenswrapper[4612]: I1203 07:44:46.817985 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-wzkm4" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.180623 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-s66tb"] Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.180933 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb3251d-18b1-42f0-b048-3b7de91538a1" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.180969 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb3251d-18b1-42f0-b048-3b7de91538a1" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.180982 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8d788a9-ac6b-4242-9ffc-26578617f984" containerName="keystone-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.180988 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8d788a9-ac6b-4242-9ffc-26578617f984" containerName="keystone-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181001 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="dnsmasq-dns" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181007 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="dnsmasq-dns" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181019 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="init" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181025 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="init" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181031 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c6a1d5-8062-4e6c-b12e-a44ed73f7038" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181037 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c6a1d5-8062-4e6c-b12e-a44ed73f7038" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181048 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768893ad-00ba-4b59-96ba-b7a078879dbe" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181055 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="768893ad-00ba-4b59-96ba-b7a078879dbe" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181074 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb7ad4dc-78e8-4503-936c-dd76a4c73175" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181080 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb7ad4dc-78e8-4503-936c-dd76a4c73175" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181096 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea00badd-5625-42e1-a6af-9fad9903385a" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181101 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea00badd-5625-42e1-a6af-9fad9903385a" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181122 4612 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70d803ac-6da1-49ea-b048-aa20f0062c44" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181128 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="70d803ac-6da1-49ea-b048-aa20f0062c44" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.181138 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58391f1e-0f92-4c3c-844d-b74b3bfd1af6" containerName="glance-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181144 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="58391f1e-0f92-4c3c-844d-b74b3bfd1af6" containerName="glance-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181295 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8d788a9-ac6b-4242-9ffc-26578617f984" containerName="keystone-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181313 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c6a1d5-8062-4e6c-b12e-a44ed73f7038" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181324 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9dfbe94-ffdc-4b45-9a25-782be58c3683" containerName="dnsmasq-dns" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181336 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="768893ad-00ba-4b59-96ba-b7a078879dbe" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181346 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb7ad4dc-78e8-4503-936c-dd76a4c73175" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181355 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea00badd-5625-42e1-a6af-9fad9903385a" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181363 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="58391f1e-0f92-4c3c-844d-b74b3bfd1af6" containerName="glance-db-sync" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181373 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="70d803ac-6da1-49ea-b048-aa20f0062c44" containerName="mariadb-account-create-update" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.181384 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fb3251d-18b1-42f0-b048-3b7de91538a1" containerName="mariadb-database-create" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.182235 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.207992 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-s66tb"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.308517 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hpzvh"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.310596 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.314850 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.314890 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.315094 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmvfj\" (UniqueName: \"kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.315146 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.315225 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.315250 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.319124 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.319292 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.319395 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.319505 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.319604 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m29rd" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.326895 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hpzvh"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.351843 4612 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-s66tb"] Dec 03 07:44:47 crc kubenswrapper[4612]: E1203 07:44:47.352535 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-wmvfj ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" podUID="0f3f287d-75e2-4662-bde8-0b5e58107efc" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.392242 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.393745 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.409032 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416459 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416612 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416646 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmvfj\" (UniqueName: \"kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416678 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416722 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416742 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416790 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416827 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416858 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416902 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416920 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.416959 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4hg9\" (UniqueName: \"kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.418289 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.418304 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.418838 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.420426 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0\") pod 
\"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.437762 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.477310 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmvfj\" (UniqueName: \"kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj\") pod \"dnsmasq-dns-6f8c45789f-s66tb\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518229 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518305 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74rvw\" (UniqueName: \"kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518343 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518368 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518413 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518430 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518450 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4hg9\" (UniqueName: \"kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9\") pod 
\"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518478 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518502 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518521 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518563 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.518580 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.523859 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.524512 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.525588 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.529694 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " 
pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.537390 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.585854 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4hg9\" (UniqueName: \"kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9\") pod \"keystone-bootstrap-hpzvh\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.620789 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.620831 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.620888 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74rvw\" (UniqueName: \"kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.620915 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.620932 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.621349 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.621925 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 
07:44:47.622049 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.622562 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.622729 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.623258 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.642139 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.654853 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.656361 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.672562 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.672747 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.672857 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-tzq7t" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.672976 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.721826 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.752850 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74rvw\" (UniqueName: \"kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw\") pod \"dnsmasq-dns-6c9c9f998c-zrdwh\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.803367 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-nvxkr"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.804463 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.810923 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.811235 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7fvv9" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.811350 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.817564 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nvxkr"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.824642 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.824693 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.824737 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.824757 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.824773 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knqbs\" (UniqueName: \"kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.857159 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.879328 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4pvx9"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.883413 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.903748 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mq4ct" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.904003 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.905171 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.909077 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.910751 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4pvx9"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928329 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928379 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928402 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knqbs\" (UniqueName: \"kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928423 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f84q\" (UniqueName: \"kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928441 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928480 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928505 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle\") pod 
\"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928570 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928604 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928622 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.928644 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.933317 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.933566 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.936527 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.940485 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.941052 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.959270 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:47 crc kubenswrapper[4612]: I1203 07:44:47.963301 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knqbs\" (UniqueName: \"kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs\") pod \"horizon-7fcddc56bf-d6w6x\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.003237 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.014845 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.020539 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035399 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035449 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmvfj\" (UniqueName: \"kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035519 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035611 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035656 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035719 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb\") pod \"0f3f287d-75e2-4662-bde8-0b5e58107efc\" (UID: \"0f3f287d-75e2-4662-bde8-0b5e58107efc\") " Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.035916 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036171 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036199 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036281 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036319 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036341 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036380 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmk9d\" (UniqueName: \"kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036403 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f84q\" (UniqueName: \"kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036422 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036501 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.036876 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.039235 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.039539 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config" (OuterVolumeSpecName: "config") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.039821 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.047715 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.050820 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.065808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.066316 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.068737 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.076616 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f84q\" (UniqueName: \"kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.079549 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle\") pod \"cinder-db-sync-nvxkr\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.080473 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj" (OuterVolumeSpecName: "kube-api-access-wmvfj") pod "0f3f287d-75e2-4662-bde8-0b5e58107efc" (UID: "0f3f287d-75e2-4662-bde8-0b5e58107efc"). InnerVolumeSpecName "kube-api-access-wmvfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.107999 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-pqqrz"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.109109 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.117178 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.117412 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.117621 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9sf96" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139439 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139492 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139527 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139565 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmk9d\" (UniqueName: \"kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139593 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk7m8\" (UniqueName: \"kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139712 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139791 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.139877 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140180 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140302 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140313 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140338 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140347 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140357 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f3f287d-75e2-4662-bde8-0b5e58107efc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.140365 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmvfj\" (UniqueName: \"kubernetes.io/projected/0f3f287d-75e2-4662-bde8-0b5e58107efc-kube-api-access-wmvfj\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.152794 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.153686 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-pqqrz"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.157970 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.158493 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.177489 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmk9d\" (UniqueName: \"kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d\") pod \"neutron-db-sync-4pvx9\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246808 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246851 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246874 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246903 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246924 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.246996 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk7m8\" (UniqueName: \"kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.247046 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.247067 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk98f\" (UniqueName: \"kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " 
pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.247102 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.247142 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.247168 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.248104 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.248725 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.249240 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.256464 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.256783 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.256975 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.257148 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.267699 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.280745 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.280910 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.286129 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk7m8\" (UniqueName: \"kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8\") pod \"dnsmasq-dns-57c957c4ff-mhkbf\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.326371 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.374526 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-ptq9j"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.378418 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392153 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd8zc\" (UniqueName: \"kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392243 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392309 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392386 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392430 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392519 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 
07:44:48.392568 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392617 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392654 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392704 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392756 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk98f\" (UniqueName: \"kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.392890 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.419213 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.425839 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-p7657" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.428096 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.432879 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.437022 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.437615 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.452123 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.489869 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk98f\" (UniqueName: \"kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f\") pod \"placement-db-sync-pqqrz\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.534757 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.534814 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.534848 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535013 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535035 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd8zc\" (UniqueName: \"kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535116 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535143 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535158 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535182 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j4vj\" (UniqueName: \"kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535196 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk7nx\" (UniqueName: \"kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535222 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535296 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535329 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data\") pod \"ceilometer-0\" (UID: 
\"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535349 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.535381 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.543215 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.543722 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.550763 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.551251 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.566227 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.566992 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.567048 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-ptq9j"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.572858 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.592561 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.593846 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqqrz" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.593887 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd8zc\" (UniqueName: \"kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc\") pod \"ceilometer-0\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.628074 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637292 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637355 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637387 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j4vj\" (UniqueName: \"kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637411 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk7nx\" (UniqueName: \"kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637440 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637496 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637541 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.637625 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle\") pod \"barbican-db-sync-ptq9j\" (UID: 
\"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.638597 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.639277 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.643026 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.652738 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.653368 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.653693 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.654346 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.654649 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.658568 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.661874 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.675970 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.676468 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-d6pcb" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.676808 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.700028 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk7nx\" (UniqueName: \"kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx\") pod \"horizon-74d97d6945-6mlkf\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.700319 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j4vj\" (UniqueName: \"kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj\") pod \"barbican-db-sync-ptq9j\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739016 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739068 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739125 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739302 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739343 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739500 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.739599 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfj9w\" (UniqueName: \"kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.747790 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.773333 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.777765 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hpzvh"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842020 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842108 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfj9w\" (UniqueName: \"kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842132 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842170 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842202 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " 
pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842262 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.842280 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.856537 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.864633 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.865552 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.865884 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.866187 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.869365 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.875460 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.878234 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: W1203 07:44:48.882767 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab909a7e_9663_490b_9cc6_f533605dacac.slice/crio-d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b WatchSource:0}: Error finding container d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b: Status 404 returned error can't find the container with id d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.885408 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.887402 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-s66tb" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.889762 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.899199 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfj9w\" (UniqueName: \"kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.909223 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " pod="openstack/glance-default-external-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949334 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949448 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlfzc\" (UniqueName: \"kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949510 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949535 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949579 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949636 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.949698 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:48 crc kubenswrapper[4612]: I1203 07:44:48.983336 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052290 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052354 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052411 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlfzc\" (UniqueName: \"kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052435 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052451 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052481 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052500 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.052595 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.053315 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc 
kubenswrapper[4612]: I1203 07:44:49.053534 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.056388 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-s66tb"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.065013 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-s66tb"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.068379 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.073986 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nvxkr"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.133356 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3f287d-75e2-4662-bde8-0b5e58107efc" path="/var/lib/kubelet/pods/0f3f287d-75e2-4662-bde8-0b5e58107efc/volumes" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.147395 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlfzc\" (UniqueName: \"kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.185719 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.186360 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.233304 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.253757 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.342719 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.491731 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4pvx9"] Dec 03 07:44:49 crc kubenswrapper[4612]: W1203 07:44:49.494750 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3191a09d_22ea_4bcb_bd3a_f245f4041596.slice/crio-394076574eaadb6626db92eb160521b83af672ba2736f9a9a392605506cb8226 WatchSource:0}: Error finding container 394076574eaadb6626db92eb160521b83af672ba2736f9a9a392605506cb8226: Status 404 returned error can't find the container with id 394076574eaadb6626db92eb160521b83af672ba2736f9a9a392605506cb8226 Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.550184 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:44:49 crc kubenswrapper[4612]: W1203 07:44:49.571248 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98884a92_65ff_4910_ab6b_9161903018ad.slice/crio-f952c8f77326cbd4eea963867c133eebeb2067075d6e3b3b0fac84c94e5fb817 WatchSource:0}: Error finding container f952c8f77326cbd4eea963867c133eebeb2067075d6e3b3b0fac84c94e5fb817: Status 404 returned error can't find the container with id f952c8f77326cbd4eea963867c133eebeb2067075d6e3b3b0fac84c94e5fb817 Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.760557 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.803988 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-pqqrz"] Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.907099 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" event={"ID":"870f8c82-2018-4c5a-ad52-5066ef6211ad","Type":"ContainerStarted","Data":"e3c750b0332d764954268e63166efc3eb4718a67fb3ec72f0bd6494bb1ddafeb"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.913562 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hpzvh" event={"ID":"ab909a7e-9663-490b-9cc6-f533605dacac","Type":"ContainerStarted","Data":"e975ac6a0d365acd57746c87807445d16de1ad7a5dc638e4bceb129d51a5c361"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.913715 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hpzvh" event={"ID":"ab909a7e-9663-490b-9cc6-f533605dacac","Type":"ContainerStarted","Data":"d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.919380 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqqrz" event={"ID":"0600693c-3ac0-4d42-8efd-c3140c6474a8","Type":"ContainerStarted","Data":"a7f1ea0b03956c55f1a2805e05d4d44d25588cd312228e18f4a0bcad3d595359"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.928301 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4pvx9" event={"ID":"c6433f62-cce6-47e4-971f-9d568a1e0cb3","Type":"ContainerStarted","Data":"fa07a2d1afec17105f3d847bf9bfa2146ee9651a272420b81a0b3ae36d7bf79a"} Dec 03 07:44:49 crc kubenswrapper[4612]: 
I1203 07:44:49.929620 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nvxkr" event={"ID":"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08","Type":"ContainerStarted","Data":"6157f35f6cbb9454508ddda22cfdaaea57a0748227cd9e2ccbffffb564cff6a4"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.931245 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fcddc56bf-d6w6x" event={"ID":"98884a92-65ff-4910-ab6b-9161903018ad","Type":"ContainerStarted","Data":"f952c8f77326cbd4eea963867c133eebeb2067075d6e3b3b0fac84c94e5fb817"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.941081 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" event={"ID":"3191a09d-22ea-4bcb-bd3a-f245f4041596","Type":"ContainerStarted","Data":"394076574eaadb6626db92eb160521b83af672ba2736f9a9a392605506cb8226"} Dec 03 07:44:49 crc kubenswrapper[4612]: I1203 07:44:49.944297 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hpzvh" podStartSLOduration=2.9442770080000003 podStartE2EDuration="2.944277008s" podCreationTimestamp="2025-12-03 07:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:49.93675472 +0000 UTC m=+1053.110112120" watchObservedRunningTime="2025-12-03 07:44:49.944277008 +0000 UTC m=+1053.117634408" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.334101 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.374142 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.385111 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-ptq9j"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.462610 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.609627 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.618577 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.649853 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.651741 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.712444 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.712566 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rn2x\" (UniqueName: \"kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.712620 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.712661 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.712685 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.731997 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.773115 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.807177 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.814224 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.814313 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rn2x\" (UniqueName: \"kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.814349 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key\") pod \"horizon-dccb9d97-l9zsv\" (UID: 
\"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.814378 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.814397 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.815629 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.816050 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.816705 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: W1203 07:44:50.818836 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod482ba975_80ee_4ff8_86de_9a3d589d1bf8.slice/crio-5aab081ad0eb6c57ac46c424111ebea21f273328fa0aad590d486a3eee96dd85 WatchSource:0}: Error finding container 5aab081ad0eb6c57ac46c424111ebea21f273328fa0aad590d486a3eee96dd85: Status 404 returned error can't find the container with id 5aab081ad0eb6c57ac46c424111ebea21f273328fa0aad590d486a3eee96dd85 Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.837739 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.874039 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rn2x\" (UniqueName: \"kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x\") pod \"horizon-dccb9d97-l9zsv\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.962932 4612 generic.go:334] "Generic (PLEG): container finished" podID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerID="3a062c566bfd12e28457e85363de64510aec37db09ccc863d19f02c76647b169" exitCode=0 Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.963011 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" event={"ID":"870f8c82-2018-4c5a-ad52-5066ef6211ad","Type":"ContainerDied","Data":"3a062c566bfd12e28457e85363de64510aec37db09ccc863d19f02c76647b169"} Dec 03 07:44:50 crc kubenswrapper[4612]: I1203 07:44:50.977658 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerStarted","Data":"5aab081ad0eb6c57ac46c424111ebea21f273328fa0aad590d486a3eee96dd85"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.012514 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74d97d6945-6mlkf" event={"ID":"86a685b8-ba46-45f9-bcd7-07978507a53d","Type":"ContainerStarted","Data":"4060304c795cb1b5e1f71188c8cba307b3927cb90e8b02e67c26e664057d7800"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.014335 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.027388 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4pvx9" event={"ID":"c6433f62-cce6-47e4-971f-9d568a1e0cb3","Type":"ContainerStarted","Data":"f9db1bd3ad19cd9f1dab9636c058ac499116de875fc34ce04cb474d5a211da77"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.033498 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerStarted","Data":"80eac5d153f2975edce2cf89aeeb857c078a15cd302880eda508e12f8c4b319f"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.036716 4612 generic.go:334] "Generic (PLEG): container finished" podID="3191a09d-22ea-4bcb-bd3a-f245f4041596" containerID="0b9cfdac3babbef75a7e4472c2390cbc5dd755e5c04f2727e6ecf670678324d1" exitCode=0 Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.036770 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" event={"ID":"3191a09d-22ea-4bcb-bd3a-f245f4041596","Type":"ContainerDied","Data":"0b9cfdac3babbef75a7e4472c2390cbc5dd755e5c04f2727e6ecf670678324d1"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.041363 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-ptq9j" event={"ID":"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff","Type":"ContainerStarted","Data":"95424c55a2378c812ba53868d286f36cd406538b31d34703d26a7483a3a95182"} Dec 03 07:44:51 crc kubenswrapper[4612]: I1203 07:44:51.102627 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4pvx9" podStartSLOduration=4.102610083 podStartE2EDuration="4.102610083s" podCreationTimestamp="2025-12-03 07:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:51.049274504 +0000 UTC m=+1054.222631904" watchObservedRunningTime="2025-12-03 07:44:51.102610083 +0000 UTC m=+1054.275967483" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.013142 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.065270 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerStarted","Data":"2249112677f47ae6f3386588dc4a78322a7f27b7ddf0d6fb2f76265071ec5ab0"} Dec 03 07:44:52 crc 
kubenswrapper[4612]: I1203 07:44:52.365693 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.451658 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.452094 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.452177 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.452289 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.452323 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74rvw\" (UniqueName: \"kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.452352 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb\") pod \"3191a09d-22ea-4bcb-bd3a-f245f4041596\" (UID: \"3191a09d-22ea-4bcb-bd3a-f245f4041596\") " Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.488720 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.498823 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw" (OuterVolumeSpecName: "kube-api-access-74rvw") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "kube-api-access-74rvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.505004 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.528696 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.555209 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.555232 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74rvw\" (UniqueName: \"kubernetes.io/projected/3191a09d-22ea-4bcb-bd3a-f245f4041596-kube-api-access-74rvw\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.555243 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.559048 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.564131 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.576964 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config" (OuterVolumeSpecName: "config") pod "3191a09d-22ea-4bcb-bd3a-f245f4041596" (UID: "3191a09d-22ea-4bcb-bd3a-f245f4041596"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.656387 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.656422 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:52 crc kubenswrapper[4612]: I1203 07:44:52.656433 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3191a09d-22ea-4bcb-bd3a-f245f4041596-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.085222 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dccb9d97-l9zsv" event={"ID":"a95a6747-6478-4737-8930-471e37160f38","Type":"ContainerStarted","Data":"668b5beaa978b48d928744711b1147c27909677b017c37d4396026717fe2fa50"} Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.103290 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.103324 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" event={"ID":"870f8c82-2018-4c5a-ad52-5066ef6211ad","Type":"ContainerStarted","Data":"b276ec1289350e95461371f20320dba8c5b7f3c490d23b097491b6a923a3fa1e"} Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.108808 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerStarted","Data":"dd19c7c94df363f7928866c2496088fb9f606a33e2be09483da6119612b968c2"} Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.113115 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" event={"ID":"3191a09d-22ea-4bcb-bd3a-f245f4041596","Type":"ContainerDied","Data":"394076574eaadb6626db92eb160521b83af672ba2736f9a9a392605506cb8226"} Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.113154 4612 scope.go:117] "RemoveContainer" containerID="0b9cfdac3babbef75a7e4472c2390cbc5dd755e5c04f2727e6ecf670678324d1" Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.113272 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-zrdwh" Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.130981 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" podStartSLOduration=6.130962488 podStartE2EDuration="6.130962488s" podCreationTimestamp="2025-12-03 07:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:53.120547069 +0000 UTC m=+1056.293904479" watchObservedRunningTime="2025-12-03 07:44:53.130962488 +0000 UTC m=+1056.304319908" Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.255595 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:53 crc kubenswrapper[4612]: I1203 07:44:53.271659 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-zrdwh"] Dec 03 07:44:54 crc kubenswrapper[4612]: I1203 07:44:54.133237 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerStarted","Data":"a4ed634c56c37b93b779247db973cf0e3967496ec549a889529dc8ea9a4b21bd"} Dec 03 07:44:54 crc kubenswrapper[4612]: I1203 07:44:54.133514 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-log" containerID="cri-o://dd19c7c94df363f7928866c2496088fb9f606a33e2be09483da6119612b968c2" gracePeriod=30 Dec 03 07:44:54 crc kubenswrapper[4612]: I1203 07:44:54.133930 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-httpd" containerID="cri-o://a4ed634c56c37b93b779247db973cf0e3967496ec549a889529dc8ea9a4b21bd" gracePeriod=30 Dec 03 07:44:54 crc kubenswrapper[4612]: I1203 07:44:54.144737 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerStarted","Data":"a20d83d70160a8b5dbd9fb5e69694389f14c0cc9db2574d6cadd922fde879e58"} Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.109877 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3191a09d-22ea-4bcb-bd3a-f245f4041596" path="/var/lib/kubelet/pods/3191a09d-22ea-4bcb-bd3a-f245f4041596/volumes" Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.160022 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerStarted","Data":"c8e752b04e416e686fcaf60eb8827ac39d2ee91550d7e697813aad129eadb20b"} Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.160127 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-httpd" containerID="cri-o://c8e752b04e416e686fcaf60eb8827ac39d2ee91550d7e697813aad129eadb20b" gracePeriod=30 Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.160109 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-log" 
containerID="cri-o://a20d83d70160a8b5dbd9fb5e69694389f14c0cc9db2574d6cadd922fde879e58" gracePeriod=30 Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.168207 4612 generic.go:334] "Generic (PLEG): container finished" podID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerID="a4ed634c56c37b93b779247db973cf0e3967496ec549a889529dc8ea9a4b21bd" exitCode=143 Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.168238 4612 generic.go:334] "Generic (PLEG): container finished" podID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerID="dd19c7c94df363f7928866c2496088fb9f606a33e2be09483da6119612b968c2" exitCode=143 Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.168261 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerDied","Data":"a4ed634c56c37b93b779247db973cf0e3967496ec549a889529dc8ea9a4b21bd"} Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.168339 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerDied","Data":"dd19c7c94df363f7928866c2496088fb9f606a33e2be09483da6119612b968c2"} Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.180926 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.180906842 podStartE2EDuration="7.180906842s" podCreationTimestamp="2025-12-03 07:44:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:54.158473084 +0000 UTC m=+1057.331830554" watchObservedRunningTime="2025-12-03 07:44:55.180906842 +0000 UTC m=+1058.354264242" Dec 03 07:44:55 crc kubenswrapper[4612]: I1203 07:44:55.182658 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.182648606 podStartE2EDuration="8.182648606s" podCreationTimestamp="2025-12-03 07:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:44:55.182053341 +0000 UTC m=+1058.355410741" watchObservedRunningTime="2025-12-03 07:44:55.182648606 +0000 UTC m=+1058.356006016" Dec 03 07:44:56 crc kubenswrapper[4612]: I1203 07:44:56.182311 4612 generic.go:334] "Generic (PLEG): container finished" podID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerID="c8e752b04e416e686fcaf60eb8827ac39d2ee91550d7e697813aad129eadb20b" exitCode=0 Dec 03 07:44:56 crc kubenswrapper[4612]: I1203 07:44:56.182341 4612 generic.go:334] "Generic (PLEG): container finished" podID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerID="a20d83d70160a8b5dbd9fb5e69694389f14c0cc9db2574d6cadd922fde879e58" exitCode=143 Dec 03 07:44:56 crc kubenswrapper[4612]: I1203 07:44:56.182366 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerDied","Data":"c8e752b04e416e686fcaf60eb8827ac39d2ee91550d7e697813aad129eadb20b"} Dec 03 07:44:56 crc kubenswrapper[4612]: I1203 07:44:56.182442 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerDied","Data":"a20d83d70160a8b5dbd9fb5e69694389f14c0cc9db2574d6cadd922fde879e58"} Dec 03 07:44:58 crc 
kubenswrapper[4612]: I1203 07:44:58.208638 4612 generic.go:334] "Generic (PLEG): container finished" podID="ab909a7e-9663-490b-9cc6-f533605dacac" containerID="e975ac6a0d365acd57746c87807445d16de1ad7a5dc638e4bceb129d51a5c361" exitCode=0 Dec 03 07:44:58 crc kubenswrapper[4612]: I1203 07:44:58.208724 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hpzvh" event={"ID":"ab909a7e-9663-490b-9cc6-f533605dacac","Type":"ContainerDied","Data":"e975ac6a0d365acd57746c87807445d16de1ad7a5dc638e4bceb129d51a5c361"} Dec 03 07:44:58 crc kubenswrapper[4612]: I1203 07:44:58.575196 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:44:58 crc kubenswrapper[4612]: I1203 07:44:58.648326 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:44:58 crc kubenswrapper[4612]: I1203 07:44:58.648571 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" containerID="cri-o://3a929e2e4c5fb428efd2e3e91f4b24090d3f569a3644c271fca0b25444b1b101" gracePeriod=10 Dec 03 07:44:59 crc kubenswrapper[4612]: I1203 07:44:59.227744 4612 generic.go:334] "Generic (PLEG): container finished" podID="343a43d3-b0e6-44f3-8089-747916e7932c" containerID="3a929e2e4c5fb428efd2e3e91f4b24090d3f569a3644c271fca0b25444b1b101" exitCode=0 Dec 03 07:44:59 crc kubenswrapper[4612]: I1203 07:44:59.228206 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" event={"ID":"343a43d3-b0e6-44f3-8089-747916e7932c","Type":"ContainerDied","Data":"3a929e2e4c5fb428efd2e3e91f4b24090d3f569a3644c271fca0b25444b1b101"} Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.096424 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.119987 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:45:00 crc kubenswrapper[4612]: E1203 07:45:00.120520 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3191a09d-22ea-4bcb-bd3a-f245f4041596" containerName="init" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.120589 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3191a09d-22ea-4bcb-bd3a-f245f4041596" containerName="init" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.120827 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3191a09d-22ea-4bcb-bd3a-f245f4041596" containerName="init" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.121769 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.123851 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.141243 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8xmq\" (UniqueName: \"kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.142848 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156552 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156721 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156753 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156771 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156794 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.156818 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.215842 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.219573 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.224665 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.225056 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.232067 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259429 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259480 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259504 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2ss9\" (UniqueName: \"kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259840 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259873 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259895 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259917 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc 
kubenswrapper[4612]: I1203 07:45:00.259935 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.259966 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.260071 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8xmq\" (UniqueName: \"kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.283358 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.303237 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-55fc5c6c94-pjh5s"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.315569 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.329205 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55fc5c6c94-pjh5s"] Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362455 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362507 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2ss9\" (UniqueName: \"kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362554 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362592 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-config-data\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362629 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-tls-certs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362659 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-secret-key\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362732 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-scripts\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362757 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d52104-a465-4ca0-a040-d9dba9e47600-logs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362792 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5n8j\" (UniqueName: \"kubernetes.io/projected/29d52104-a465-4ca0-a040-d9dba9e47600-kube-api-access-b5n8j\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.362829 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-combined-ca-bundle\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465436 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-combined-ca-bundle\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465597 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-config-data\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465647 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-tls-certs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465676 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-secret-key\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465746 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-scripts\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465780 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d52104-a465-4ca0-a040-d9dba9e47600-logs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.465832 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5n8j\" (UniqueName: \"kubernetes.io/projected/29d52104-a465-4ca0-a040-d9dba9e47600-kube-api-access-b5n8j\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.503580 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.506010 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.506870 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-config-data\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.507049 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.507367 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.508640 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs\") 
pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.509674 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.510350 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-secret-key\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.511849 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2ss9\" (UniqueName: \"kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.511914 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume\") pod \"collect-profiles-29412465-r9brg\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.512110 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29d52104-a465-4ca0-a040-d9dba9e47600-scripts\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.512441 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8xmq\" (UniqueName: \"kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.512599 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts\") pod \"horizon-767d79bd88-5spkc\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.516655 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d52104-a465-4ca0-a040-d9dba9e47600-logs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.517363 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-horizon-tls-certs\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 
07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.518266 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5n8j\" (UniqueName: \"kubernetes.io/projected/29d52104-a465-4ca0-a040-d9dba9e47600-kube-api-access-b5n8j\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.531673 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d52104-a465-4ca0-a040-d9dba9e47600-combined-ca-bundle\") pod \"horizon-55fc5c6c94-pjh5s\" (UID: \"29d52104-a465-4ca0-a040-d9dba9e47600\") " pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.546157 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.775450 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:00 crc kubenswrapper[4612]: I1203 07:45:00.803830 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:01 crc kubenswrapper[4612]: I1203 07:45:01.750999 4612 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podd0142de5-2b0a-478b-b52e-4994b412c7c1"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podd0142de5-2b0a-478b-b52e-4994b412c7c1] : Timed out while waiting for systemd to remove kubepods-besteffort-podd0142de5_2b0a_478b_b52e_4994b412c7c1.slice" Dec 03 07:45:03 crc kubenswrapper[4612]: I1203 07:45:03.581835 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: connect: connection refused" Dec 03 07:45:08 crc kubenswrapper[4612]: I1203 07:45:08.581723 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: connect: connection refused" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.877006 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.877624 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfbh5c8h575h64bhbdh57bh5f6h5f5h544h659h565hch5ffh6dh557hf8h5f7hdfh599h569hb7h9dh665hc5h558h5d7h564h84h5c9hbdh78h8fq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2rn2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-dccb9d97-l9zsv_openstack(a95a6747-6478-4737-8930-471e37160f38): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.884377 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-dccb9d97-l9zsv" podUID="a95a6747-6478-4737-8930-471e37160f38" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.893992 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.896107 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n545h9dh675h5d6h54fh57chc4h569h55dh94hd6h555h659h594h8dh5h5d8h55dh5fbh58bh5c7h68chd5h58ch584h9dh566hddh65dh655hcdhc7q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-knqbs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7fcddc56bf-d6w6x_openstack(98884a92-65ff-4910-ab6b-9161903018ad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.898528 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7fcddc56bf-d6w6x" podUID="98884a92-65ff-4910-ab6b-9161903018ad" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.900002 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.900135 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n87h595h58fh697h57chd9h64bh697h5c4h59h5fbh577hc7h566h554h667h89h4h66ch56dh6hf9h67h66chc8h97h58bh5c5h677hb7hd6h565q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mk7nx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-74d97d6945-6mlkf_openstack(86a685b8-ba46-45f9-bcd7-07978507a53d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:45:11 crc kubenswrapper[4612]: E1203 07:45:11.909057 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-74d97d6945-6mlkf" podUID="86a685b8-ba46-45f9-bcd7-07978507a53d" Dec 03 07:45:11 crc kubenswrapper[4612]: I1203 07:45:11.941176 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124539 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124669 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124769 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4hg9\" (UniqueName: \"kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124799 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124839 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.124853 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle\") pod \"ab909a7e-9663-490b-9cc6-f533605dacac\" (UID: \"ab909a7e-9663-490b-9cc6-f533605dacac\") " Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.131063 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.133736 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.134858 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9" (OuterVolumeSpecName: "kube-api-access-p4hg9") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "kube-api-access-p4hg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.151297 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts" (OuterVolumeSpecName: "scripts") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.159407 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.161755 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data" (OuterVolumeSpecName: "config-data") pod "ab909a7e-9663-490b-9cc6-f533605dacac" (UID: "ab909a7e-9663-490b-9cc6-f533605dacac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227641 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227670 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4hg9\" (UniqueName: \"kubernetes.io/projected/ab909a7e-9663-490b-9cc6-f533605dacac-kube-api-access-p4hg9\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227681 4612 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227693 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227702 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.227710 4612 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab909a7e-9663-490b-9cc6-f533605dacac-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.354770 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hpzvh" event={"ID":"ab909a7e-9663-490b-9cc6-f533605dacac","Type":"ContainerDied","Data":"d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b"} Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.355201 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d837fbf22db1651f296c04553cfd60826763125c62153457fedbb62ef7fd404b" Dec 03 07:45:12 crc kubenswrapper[4612]: I1203 07:45:12.355072 4612 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hpzvh" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.127664 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hpzvh"] Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.127977 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hpzvh"] Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.219043 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gc599"] Dec 03 07:45:13 crc kubenswrapper[4612]: E1203 07:45:13.219387 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab909a7e-9663-490b-9cc6-f533605dacac" containerName="keystone-bootstrap" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.219401 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab909a7e-9663-490b-9cc6-f533605dacac" containerName="keystone-bootstrap" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.219595 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab909a7e-9663-490b-9cc6-f533605dacac" containerName="keystone-bootstrap" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.220141 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.222633 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.222633 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.238233 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.238311 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m29rd" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.238233 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.258620 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gc599"] Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.352627 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.352693 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.352712 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc 
kubenswrapper[4612]: I1203 07:45:13.352733 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.352823 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv8qf\" (UniqueName: \"kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.353001 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.454900 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.454988 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.455037 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.455059 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.455083 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.455114 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv8qf\" (UniqueName: \"kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.460531 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.461164 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.468269 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.469840 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.474923 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.481152 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv8qf\" (UniqueName: \"kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf\") pod \"keystone-bootstrap-gc599\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:13 crc kubenswrapper[4612]: I1203 07:45:13.539379 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:15 crc kubenswrapper[4612]: I1203 07:45:15.099663 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab909a7e-9663-490b-9cc6-f533605dacac" path="/var/lib/kubelet/pods/ab909a7e-9663-490b-9cc6-f533605dacac/volumes" Dec 03 07:45:16 crc kubenswrapper[4612]: I1203 07:45:16.388931 4612 generic.go:334] "Generic (PLEG): container finished" podID="c6433f62-cce6-47e4-971f-9d568a1e0cb3" containerID="f9db1bd3ad19cd9f1dab9636c058ac499116de875fc34ce04cb474d5a211da77" exitCode=0 Dec 03 07:45:16 crc kubenswrapper[4612]: I1203 07:45:16.388999 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4pvx9" event={"ID":"c6433f62-cce6-47e4-971f-9d568a1e0cb3","Type":"ContainerDied","Data":"f9db1bd3ad19cd9f1dab9636c058ac499116de875fc34ce04cb474d5a211da77"} Dec 03 07:45:18 crc kubenswrapper[4612]: I1203 07:45:18.581701 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: i/o timeout" Dec 03 07:45:18 crc kubenswrapper[4612]: I1203 07:45:18.582742 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:45:18 crc kubenswrapper[4612]: I1203 07:45:18.984364 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:45:18 crc kubenswrapper[4612]: I1203 07:45:18.984420 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:45:19 crc kubenswrapper[4612]: I1203 07:45:19.255422 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:19 crc kubenswrapper[4612]: I1203 07:45:19.255613 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.682671 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.690068 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.793763 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.793829 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.793872 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.793902 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.793977 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794010 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794039 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794095 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlfzc\" (UniqueName: \"kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794132 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794154 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794202 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data\") pod \"193f36c9-49e4-4376-90bc-656c1ce4251d\" (UID: \"193f36c9-49e4-4376-90bc-656c1ce4251d\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794239 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfj9w\" (UniqueName: \"kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794265 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.794308 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle\") pod \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\" (UID: \"482ba975-80ee-4ff8-86de-9a3d589d1bf8\") " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.795136 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.796341 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.799305 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w" (OuterVolumeSpecName: "kube-api-access-mfj9w") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "kube-api-access-mfj9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.799380 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc" (OuterVolumeSpecName: "kube-api-access-tlfzc") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "kube-api-access-tlfzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.799669 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs" (OuterVolumeSpecName: "logs") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.800161 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.800474 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs" (OuterVolumeSpecName: "logs") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.800587 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts" (OuterVolumeSpecName: "scripts") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.802936 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts" (OuterVolumeSpecName: "scripts") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.821346 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.830662 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.851128 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.851657 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data" (OuterVolumeSpecName: "config-data") pod "482ba975-80ee-4ff8-86de-9a3d589d1bf8" (UID: "482ba975-80ee-4ff8-86de-9a3d589d1bf8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.863586 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data" (OuterVolumeSpecName: "config-data") pod "193f36c9-49e4-4376-90bc-656c1ce4251d" (UID: "193f36c9-49e4-4376-90bc-656c1ce4251d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.896742 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.896773 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.896806 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.896822 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.896836 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.903648 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.903756 4612 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/193f36c9-49e4-4376-90bc-656c1ce4251d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.903865 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlfzc\" (UniqueName: \"kubernetes.io/projected/193f36c9-49e4-4376-90bc-656c1ce4251d-kube-api-access-tlfzc\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.903927 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.904019 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.904093 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/193f36c9-49e4-4376-90bc-656c1ce4251d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.904152 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfj9w\" (UniqueName: 
\"kubernetes.io/projected/482ba975-80ee-4ff8-86de-9a3d589d1bf8-kube-api-access-mfj9w\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.904218 4612 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/482ba975-80ee-4ff8-86de-9a3d589d1bf8-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.904286 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482ba975-80ee-4ff8-86de-9a3d589d1bf8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.917835 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 03 07:45:20 crc kubenswrapper[4612]: I1203 07:45:20.919146 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.005635 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.005664 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.170832 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.170966 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2j4vj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-ptq9j_openstack(2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.172162 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-ptq9j" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.173800 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.182755 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.207981 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.213125 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.218469 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312456 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc\") pod \"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312500 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb\") pod \"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312535 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config\") pod \"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312576 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data\") pod \"98884a92-65ff-4910-ab6b-9161903018ad\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312596 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rn2x\" (UniqueName: \"kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x\") pod \"a95a6747-6478-4737-8930-471e37160f38\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312634 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hxcn\" (UniqueName: \"kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn\") pod \"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312673 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data\") pod \"a95a6747-6478-4737-8930-471e37160f38\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312694 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmk9d\" (UniqueName: \"kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d\") pod \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312719 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key\") pod \"98884a92-65ff-4910-ab6b-9161903018ad\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312754 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb\") pod 
\"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312773 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs\") pod \"98884a92-65ff-4910-ab6b-9161903018ad\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312789 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts\") pod \"a95a6747-6478-4737-8930-471e37160f38\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312806 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk7nx\" (UniqueName: \"kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx\") pod \"86a685b8-ba46-45f9-bcd7-07978507a53d\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312825 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle\") pod \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312843 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0\") pod \"343a43d3-b0e6-44f3-8089-747916e7932c\" (UID: \"343a43d3-b0e6-44f3-8089-747916e7932c\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312876 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts\") pod \"98884a92-65ff-4910-ab6b-9161903018ad\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.312921 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key\") pod \"a95a6747-6478-4737-8930-471e37160f38\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313004 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs\") pod \"a95a6747-6478-4737-8930-471e37160f38\" (UID: \"a95a6747-6478-4737-8930-471e37160f38\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313027 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts\") pod \"86a685b8-ba46-45f9-bcd7-07978507a53d\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313055 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data\") pod \"86a685b8-ba46-45f9-bcd7-07978507a53d\" (UID: 
\"86a685b8-ba46-45f9-bcd7-07978507a53d\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313069 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key\") pod \"86a685b8-ba46-45f9-bcd7-07978507a53d\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313095 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config\") pod \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\" (UID: \"c6433f62-cce6-47e4-971f-9d568a1e0cb3\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313113 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knqbs\" (UniqueName: \"kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs\") pod \"98884a92-65ff-4910-ab6b-9161903018ad\" (UID: \"98884a92-65ff-4910-ab6b-9161903018ad\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313133 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs\") pod \"86a685b8-ba46-45f9-bcd7-07978507a53d\" (UID: \"86a685b8-ba46-45f9-bcd7-07978507a53d\") " Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.313763 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs" (OuterVolumeSpecName: "logs") pod "86a685b8-ba46-45f9-bcd7-07978507a53d" (UID: "86a685b8-ba46-45f9-bcd7-07978507a53d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.314737 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs" (OuterVolumeSpecName: "logs") pod "a95a6747-6478-4737-8930-471e37160f38" (UID: "a95a6747-6478-4737-8930-471e37160f38"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.323309 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "86a685b8-ba46-45f9-bcd7-07978507a53d" (UID: "86a685b8-ba46-45f9-bcd7-07978507a53d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.323437 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d" (OuterVolumeSpecName: "kube-api-access-lmk9d") pod "c6433f62-cce6-47e4-971f-9d568a1e0cb3" (UID: "c6433f62-cce6-47e4-971f-9d568a1e0cb3"). InnerVolumeSpecName "kube-api-access-lmk9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.323667 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx" (OuterVolumeSpecName: "kube-api-access-mk7nx") pod "86a685b8-ba46-45f9-bcd7-07978507a53d" (UID: "86a685b8-ba46-45f9-bcd7-07978507a53d"). 
InnerVolumeSpecName "kube-api-access-mk7nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.324202 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts" (OuterVolumeSpecName: "scripts") pod "86a685b8-ba46-45f9-bcd7-07978507a53d" (UID: "86a685b8-ba46-45f9-bcd7-07978507a53d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.324205 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs" (OuterVolumeSpecName: "logs") pod "98884a92-65ff-4910-ab6b-9161903018ad" (UID: "98884a92-65ff-4910-ab6b-9161903018ad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.324308 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data" (OuterVolumeSpecName: "config-data") pod "98884a92-65ff-4910-ab6b-9161903018ad" (UID: "98884a92-65ff-4910-ab6b-9161903018ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.324742 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data" (OuterVolumeSpecName: "config-data") pod "86a685b8-ba46-45f9-bcd7-07978507a53d" (UID: "86a685b8-ba46-45f9-bcd7-07978507a53d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.325153 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts" (OuterVolumeSpecName: "scripts") pod "a95a6747-6478-4737-8930-471e37160f38" (UID: "a95a6747-6478-4737-8930-471e37160f38"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.331820 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data" (OuterVolumeSpecName: "config-data") pod "a95a6747-6478-4737-8930-471e37160f38" (UID: "a95a6747-6478-4737-8930-471e37160f38"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.332898 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts" (OuterVolumeSpecName: "scripts") pod "98884a92-65ff-4910-ab6b-9161903018ad" (UID: "98884a92-65ff-4910-ab6b-9161903018ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.337140 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs" (OuterVolumeSpecName: "kube-api-access-knqbs") pod "98884a92-65ff-4910-ab6b-9161903018ad" (UID: "98884a92-65ff-4910-ab6b-9161903018ad"). InnerVolumeSpecName "kube-api-access-knqbs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.337499 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "98884a92-65ff-4910-ab6b-9161903018ad" (UID: "98884a92-65ff-4910-ab6b-9161903018ad"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.337665 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn" (OuterVolumeSpecName: "kube-api-access-4hxcn") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "kube-api-access-4hxcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.337924 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a95a6747-6478-4737-8930-471e37160f38" (UID: "a95a6747-6478-4737-8930-471e37160f38"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.341661 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x" (OuterVolumeSpecName: "kube-api-access-2rn2x") pod "a95a6747-6478-4737-8930-471e37160f38" (UID: "a95a6747-6478-4737-8930-471e37160f38"). InnerVolumeSpecName "kube-api-access-2rn2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.352857 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6433f62-cce6-47e4-971f-9d568a1e0cb3" (UID: "c6433f62-cce6-47e4-971f-9d568a1e0cb3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.362772 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.363077 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config" (OuterVolumeSpecName: "config") pod "c6433f62-cce6-47e4-971f-9d568a1e0cb3" (UID: "c6433f62-cce6-47e4-971f-9d568a1e0cb3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.373714 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config" (OuterVolumeSpecName: "config") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.391468 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.391966 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.393592 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "343a43d3-b0e6-44f3-8089-747916e7932c" (UID: "343a43d3-b0e6-44f3-8089-747916e7932c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415278 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hxcn\" (UniqueName: \"kubernetes.io/projected/343a43d3-b0e6-44f3-8089-747916e7932c-kube-api-access-4hxcn\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415303 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415313 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmk9d\" (UniqueName: \"kubernetes.io/projected/c6433f62-cce6-47e4-971f-9d568a1e0cb3-kube-api-access-lmk9d\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415322 4612 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98884a92-65ff-4910-ab6b-9161903018ad-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415331 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415339 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98884a92-65ff-4910-ab6b-9161903018ad-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415347 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk7nx\" (UniqueName: \"kubernetes.io/projected/86a685b8-ba46-45f9-bcd7-07978507a53d-kube-api-access-mk7nx\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415355 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a95a6747-6478-4737-8930-471e37160f38-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 
07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415363 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415373 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415381 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415388 4612 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a95a6747-6478-4737-8930-471e37160f38-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415396 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a95a6747-6478-4737-8930-471e37160f38-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415404 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415413 4612 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86a685b8-ba46-45f9-bcd7-07978507a53d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415421 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86a685b8-ba46-45f9-bcd7-07978507a53d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415429 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6433f62-cce6-47e4-971f-9d568a1e0cb3-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415437 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knqbs\" (UniqueName: \"kubernetes.io/projected/98884a92-65ff-4910-ab6b-9161903018ad-kube-api-access-knqbs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415446 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86a685b8-ba46-45f9-bcd7-07978507a53d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415453 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415460 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415468 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/343a43d3-b0e6-44f3-8089-747916e7932c-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415477 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98884a92-65ff-4910-ab6b-9161903018ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.415485 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rn2x\" (UniqueName: \"kubernetes.io/projected/a95a6747-6478-4737-8930-471e37160f38-kube-api-access-2rn2x\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.432453 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4pvx9" event={"ID":"c6433f62-cce6-47e4-971f-9d568a1e0cb3","Type":"ContainerDied","Data":"fa07a2d1afec17105f3d847bf9bfa2146ee9651a272420b81a0b3ae36d7bf79a"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.432494 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa07a2d1afec17105f3d847bf9bfa2146ee9651a272420b81a0b3ae36d7bf79a" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.432507 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4pvx9" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.434372 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fcddc56bf-d6w6x" event={"ID":"98884a92-65ff-4910-ab6b-9161903018ad","Type":"ContainerDied","Data":"f952c8f77326cbd4eea963867c133eebeb2067075d6e3b3b0fac84c94e5fb817"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.434393 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fcddc56bf-d6w6x" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.436397 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dccb9d97-l9zsv" event={"ID":"a95a6747-6478-4737-8930-471e37160f38","Type":"ContainerDied","Data":"668b5beaa978b48d928744711b1147c27909677b017c37d4396026717fe2fa50"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.436415 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dccb9d97-l9zsv" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.439988 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"193f36c9-49e4-4376-90bc-656c1ce4251d","Type":"ContainerDied","Data":"2249112677f47ae6f3386588dc4a78322a7f27b7ddf0d6fb2f76265071ec5ab0"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.440032 4612 scope.go:117] "RemoveContainer" containerID="c8e752b04e416e686fcaf60eb8827ac39d2ee91550d7e697813aad129eadb20b" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.440122 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.451992 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" event={"ID":"343a43d3-b0e6-44f3-8089-747916e7932c","Type":"ContainerDied","Data":"4b51aa3fc03fb1aa3c0ab58ce4a5e2d05cec2b008109819e04141017b0678190"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.452093 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.462428 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"482ba975-80ee-4ff8-86de-9a3d589d1bf8","Type":"ContainerDied","Data":"5aab081ad0eb6c57ac46c424111ebea21f273328fa0aad590d486a3eee96dd85"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.462529 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.482374 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d97d6945-6mlkf" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.482964 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74d97d6945-6mlkf" event={"ID":"86a685b8-ba46-45f9-bcd7-07978507a53d","Type":"ContainerDied","Data":"4060304c795cb1b5e1f71188c8cba307b3927cb90e8b02e67c26e664057d7800"} Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.487087 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.493836 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-ptq9j" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.496662 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506225 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506574 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506592 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506614 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506622 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506632 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="init" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506638 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="init" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506654 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506660 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 
07:45:21.506675 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506681 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506691 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6433f62-cce6-47e4-971f-9d568a1e0cb3" containerName="neutron-db-sync" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506698 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6433f62-cce6-47e4-971f-9d568a1e0cb3" containerName="neutron-db-sync" Dec 03 07:45:21 crc kubenswrapper[4612]: E1203 07:45:21.506711 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506717 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506872 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506892 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506910 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" containerName="glance-log" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506920 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" containerName="glance-httpd" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506929 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.506956 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6433f62-cce6-47e4-971f-9d568a1e0cb3" containerName="neutron-db-sync" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.507799 4612 util.go:30] "No sandbox for pod can be found. 
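Before admitting the replacement glance pod, the CPU and memory managers purge per-container resource state left by the pods just deleted (the RemoveStaleState and "Deleted CPUSet assignment" entries above). A toy illustration of that bookkeeping pattern, assuming a map keyed by pod UID and container name; this is not the kubelet's actual implementation:

// Toy illustration (not kubelet code) of the RemoveStaleState pattern seen
// above: drop per-container assignments whose pod is no longer active.
package main

import "fmt"

type key struct{ podUID, container string }

func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %s\n", k.container, k.podUID)
			delete(assignments, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	// pod UIDs and container names taken from the entries above
	assignments := map[key]string{
		{"343a43d3-b0e6-44f3-8089-747916e7932c", "dnsmasq-dns"}:     "cpuset 0-1",
		{"c6433f62-cce6-47e4-971f-9d568a1e0cb3", "neutron-db-sync"}: "cpuset 2",
	}
	active := map[string]bool{} // both pods were just deleted
	removeStaleState(assignments, active)
	fmt.Println("remaining assignments:", len(assignments))
}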
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.520815 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.522446 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-d6pcb" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.524336 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.525141 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.548507 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.617489 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635009 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635099 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635154 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635186 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcdfb\" (UniqueName: \"kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635242 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635262 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc 
kubenswrapper[4612]: I1203 07:45:21.635279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.635308 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.667579 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7fcddc56bf-d6w6x"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.737478 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.737580 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.737688 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.738044 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcdfb\" (UniqueName: \"kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.738130 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.738204 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.738231 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.738292 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.749760 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.751874 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.751960 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.788242 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.788334 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-dccb9d97-l9zsv"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.788509 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.793788 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.796725 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.805053 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.805404 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bcdfb\" (UniqueName: \"kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.828488 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.829417 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.837287 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-74d97d6945-6mlkf"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.853580 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.869928 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.880807 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.884493 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.885036 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.887867 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.888153 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.891990 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tdwk"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.903005 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.939976 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940029 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940060 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940108 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940224 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940276 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rdpw\" (UniqueName: \"kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940302 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:21 crc kubenswrapper[4612]: I1203 07:45:21.940378 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.041900 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.041997 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042075 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042103 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042127 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rdpw\" (UniqueName: \"kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042155 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042193 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042261 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.042704 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.043005 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.043715 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.046122 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.047356 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.049722 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.059819 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.068706 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rdpw\" (UniqueName: \"kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.086027 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.139506 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.228633 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.561089 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.563916 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.607839 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659681 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659764 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659794 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2mcv\" (UniqueName: \"kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659818 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659868 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.659889 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761019 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2mcv\" (UniqueName: \"kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761069 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761118 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761140 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761190 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.761241 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.762355 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.762489 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.762829 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.762880 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.763105 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.810914 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2mcv\" (UniqueName: 
\"kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv\") pod \"dnsmasq-dns-5ccc5c4795-j7p7n\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.822083 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.823497 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.829563 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mq4ct" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.829969 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.830224 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.830468 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.832072 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.865839 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.865906 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.866490 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.866548 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw8wc\" (UniqueName: \"kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.866576 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.913484 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.968112 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.968175 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.968241 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.968293 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw8wc\" (UniqueName: \"kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.968320 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.972384 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.973126 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.978731 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.992644 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:22 crc kubenswrapper[4612]: I1203 07:45:22.993292 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sw8wc\" (UniqueName: \"kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc\") pod \"neutron-79dcc9984-j6bsp\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.102289 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="193f36c9-49e4-4376-90bc-656c1ce4251d" path="/var/lib/kubelet/pods/193f36c9-49e4-4376-90bc-656c1ce4251d/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.104438 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" path="/var/lib/kubelet/pods/343a43d3-b0e6-44f3-8089-747916e7932c/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.105065 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="482ba975-80ee-4ff8-86de-9a3d589d1bf8" path="/var/lib/kubelet/pods/482ba975-80ee-4ff8-86de-9a3d589d1bf8/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.106323 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86a685b8-ba46-45f9-bcd7-07978507a53d" path="/var/lib/kubelet/pods/86a685b8-ba46-45f9-bcd7-07978507a53d/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.106957 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98884a92-65ff-4910-ab6b-9161903018ad" path="/var/lib/kubelet/pods/98884a92-65ff-4910-ab6b-9161903018ad/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.107527 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a95a6747-6478-4737-8930-471e37160f38" path="/var/lib/kubelet/pods/a95a6747-6478-4737-8930-471e37160f38/volumes" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.149350 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.583428 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tdwk" podUID="343a43d3-b0e6-44f3-8089-747916e7932c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: i/o timeout" Dec 03 07:45:23 crc kubenswrapper[4612]: E1203 07:45:23.675603 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 03 07:45:23 crc kubenswrapper[4612]: E1203 07:45:23.675755 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6f84q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-nvxkr_openstack(cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 07:45:23 crc kubenswrapper[4612]: E1203 07:45:23.677632 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code 
= Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-nvxkr" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" Dec 03 07:45:23 crc kubenswrapper[4612]: I1203 07:45:23.776230 4612 scope.go:117] "RemoveContainer" containerID="a20d83d70160a8b5dbd9fb5e69694389f14c0cc9db2574d6cadd922fde879e58" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.044280 4612 scope.go:117] "RemoveContainer" containerID="3a929e2e4c5fb428efd2e3e91f4b24090d3f569a3644c271fca0b25444b1b101" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.150136 4612 scope.go:117] "RemoveContainer" containerID="a2eddebaa94aa7d16452dc6c2f7ec069a3a188fe1e57044b4da920ad09eee655" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.249090 4612 scope.go:117] "RemoveContainer" containerID="a4ed634c56c37b93b779247db973cf0e3967496ec549a889529dc8ea9a4b21bd" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.292052 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55fc5c6c94-pjh5s"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.321894 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:45:24 crc kubenswrapper[4612]: W1203 07:45:24.345719 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29d52104_a465_4ca0_a040_d9dba9e47600.slice/crio-badfb7e92e1101ccd5ec20b5b04925c32d39d1a68796aab718b664d6628f1af0 WatchSource:0}: Error finding container badfb7e92e1101ccd5ec20b5b04925c32d39d1a68796aab718b664d6628f1af0: Status 404 returned error can't find the container with id badfb7e92e1101ccd5ec20b5b04925c32d39d1a68796aab718b664d6628f1af0 Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.392286 4612 scope.go:117] "RemoveContainer" containerID="dd19c7c94df363f7928866c2496088fb9f606a33e2be09483da6119612b968c2" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.529483 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqqrz" event={"ID":"0600693c-3ac0-4d42-8efd-c3140c6474a8","Type":"ContainerStarted","Data":"34d6daa6f15b1c60dccf87c84047399bc6abe70b6604de7104cfcbaf26ab6442"} Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.543043 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerStarted","Data":"ecd5a2b56e26e33abfea2e53aca51a098323619eb518b12f77c4f68f7e07cd21"} Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.544760 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerStarted","Data":"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689"} Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.549313 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55fc5c6c94-pjh5s" event={"ID":"29d52104-a465-4ca0-a040-d9dba9e47600","Type":"ContainerStarted","Data":"badfb7e92e1101ccd5ec20b5b04925c32d39d1a68796aab718b664d6628f1af0"}
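The error entries above, together with the ImagePullBackOff entry just below, trace kubelet's image-pull failure path for cinder-db-sync-nvxkr: the CRI pull RPC is cancelled (ErrImagePull), pod_workers records the failed sync, and later syncs are throttled with ImagePullBackOff until a pull finally succeeds. A minimal sketch for tallying those failures per pod when triaging a capture like this one; the sample lines are abridged stand-ins for the full entries (the "..." elide the rpc error strings), assuming one journal entry per line as in the raw kubelet.log:

import re
from collections import Counter

# Abridged stand-ins for the two pod_workers.go:1301 entries nearby;
# "..." elides the full rpc error strings.
sample = [
    'E1203 07:45:23.677632 4612 pod_workers.go:1301] "Error syncing pod, skipping" '
    'err="failed to \\"StartContainer\\" for \\"cinder-db-sync\\" with ErrImagePull: ..." '
    'pod="openstack/cinder-db-sync-nvxkr"',
    'E1203 07:45:24.562917 4612 pod_workers.go:1301] "Error syncing pod, skipping" '
    'err="failed to \\"StartContainer\\" for \\"cinder-db-sync\\" with ImagePullBackOff: ..." '
    'pod="openstack/cinder-db-sync-nvxkr"',
]

# One failure reason and one pod name per "Error syncing pod" entry.
pat = re.compile(r'with (ErrImagePull|ImagePullBackOff).*?pod="([^"]+)"')

hits = Counter()
for line in sample:                     # or: for line in open('kubelet.log')
    for reason, pod in pat.findall(line):
        hits[(pod, reason)] += 1

for (pod, reason), n in sorted(hits.items()):
    print(f'{pod}: {reason} x{n}')
# openstack/cinder-db-sync-nvxkr: ErrImagePull x1
# openstack/cinder-db-sync-nvxkr: ImagePullBackOff x1

How long the eventually-successful pull took is recoverable from the pod_startup_latency_tracker entries, as sketched further below.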
Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.559373 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-pqqrz" podStartSLOduration=5.817225798 podStartE2EDuration="36.559349946s" podCreationTimestamp="2025-12-03 07:44:48 +0000 UTC" firstStartedPulling="2025-12-03 07:44:49.820570035 +0000 UTC m=+1052.993927435" lastFinishedPulling="2025-12-03 07:45:20.562694113 +0000 UTC m=+1083.736051583" observedRunningTime="2025-12-03 07:45:24.553423459 +0000 UTC m=+1087.726780869" watchObservedRunningTime="2025-12-03 07:45:24.559349946 +0000 UTC m=+1087.732707356" Dec 03 07:45:24 crc kubenswrapper[4612]: E1203 07:45:24.562917 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-nvxkr" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.800776 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.822222 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.836642 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gc599"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.854221 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.971998 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.995255 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f7b8b49c9-7g7fc"] Dec 03 07:45:24 crc kubenswrapper[4612]: I1203 07:45:24.997158 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:24.999877 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.000064 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.007916 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7b8b49c9-7g7fc"] Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133414 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t75hv\" (UniqueName: \"kubernetes.io/projected/d86bd30e-374a-4a76-be08-89a4e3310b61-kube-api-access-t75hv\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133562 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-ovndb-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133599 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-public-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133716 4612
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-internal-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133758 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133846 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-combined-ca-bundle\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.133871 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-httpd-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235056 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t75hv\" (UniqueName: \"kubernetes.io/projected/d86bd30e-374a-4a76-be08-89a4e3310b61-kube-api-access-t75hv\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235158 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-ovndb-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235185 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-public-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235229 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-internal-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235254 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235276 4612 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-combined-ca-bundle\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.235293 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-httpd-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.244718 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-public-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.245493 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-combined-ca-bundle\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.247183 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-internal-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.247289 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-ovndb-tls-certs\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.253838 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-httpd-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.262030 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d86bd30e-374a-4a76-be08-89a4e3310b61-config\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.263008 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t75hv\" (UniqueName: \"kubernetes.io/projected/d86bd30e-374a-4a76-be08-89a4e3310b61-kube-api-access-t75hv\") pod \"neutron-6f7b8b49c9-7g7fc\" (UID: \"d86bd30e-374a-4a76-be08-89a4e3310b61\") " pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.558350 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.628683 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" event={"ID":"189d4587-3dd1-495a-9906-99f66131135c","Type":"ContainerStarted","Data":"735c4826b008d0cce9697c8b7db3b4f6f292cf54edc95b0b1aadcf77f9d7a281"} Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.636194 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerStarted","Data":"ef7440fc8cb442af497d26ab77fba2a5f1108a845292cf3264265ccced90e311"} Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.650898 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" event={"ID":"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6","Type":"ContainerStarted","Data":"a755e3dd482ce4d1dd42b67cb1f4c4843b5f6f4f52d3104f52695bfa6c24471a"} Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.656147 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gc599" event={"ID":"3a0871c4-f877-4382-8458-cc41ff21f67d","Type":"ContainerStarted","Data":"964f21d2083a9efca7fbfaefc75ebc2e3ccbd39df125bdbd992bc72d28886f71"} Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.671677 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:45:25 crc kubenswrapper[4612]: I1203 07:45:25.874430 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.297797 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7b8b49c9-7g7fc"] Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.728916 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55fc5c6c94-pjh5s" event={"ID":"29d52104-a465-4ca0-a040-d9dba9e47600","Type":"ContainerStarted","Data":"59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.729219 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55fc5c6c94-pjh5s" event={"ID":"29d52104-a465-4ca0-a040-d9dba9e47600","Type":"ContainerStarted","Data":"05e97f8a76b8e3cae806ed2455f3fabd1d92a3a149da90aa93edb0555f382d15"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.735082 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerStarted","Data":"aed0a2eb71730ac7a483066e588f1af70756446d0c35418691b93e1de1dbfb48"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.746086 4612 generic.go:334] "Generic (PLEG): container finished" podID="189d4587-3dd1-495a-9906-99f66131135c" containerID="2d257237a7e4f024371b0bcbd2937d51660398ef14e8a8b712e4d65892f55d3c" exitCode=0 Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.746180 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" event={"ID":"189d4587-3dd1-495a-9906-99f66131135c","Type":"ContainerDied","Data":"2d257237a7e4f024371b0bcbd2937d51660398ef14e8a8b712e4d65892f55d3c"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.752414 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" 
event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerStarted","Data":"4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.752465 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerStarted","Data":"5f382c79b6c5dc2c810237d3351ca896247a02c86f54a420b90aa489990e83c8"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.782092 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerStarted","Data":"64aeab9d82e8506fbc885cd93e574aa85766e5145a0e7cdb5278933455055721"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.782140 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerStarted","Data":"ebb9b2e5a5f58b764116d34e3b0ce4c7b054e23f17257b40b47db13d70b5f869"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.800004 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-55fc5c6c94-pjh5s" podStartSLOduration=26.279561134 podStartE2EDuration="26.799979982s" podCreationTimestamp="2025-12-03 07:45:00 +0000 UTC" firstStartedPulling="2025-12-03 07:45:24.352726938 +0000 UTC m=+1087.526084338" lastFinishedPulling="2025-12-03 07:45:24.873145786 +0000 UTC m=+1088.046503186" observedRunningTime="2025-12-03 07:45:26.768280012 +0000 UTC m=+1089.941637432" watchObservedRunningTime="2025-12-03 07:45:26.799979982 +0000 UTC m=+1089.973337372" Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.801235 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-767d79bd88-5spkc" podStartSLOduration=26.089721784 podStartE2EDuration="26.801209323s" podCreationTimestamp="2025-12-03 07:45:00 +0000 UTC" firstStartedPulling="2025-12-03 07:45:24.39251431 +0000 UTC m=+1087.565871710" lastFinishedPulling="2025-12-03 07:45:25.104001849 +0000 UTC m=+1088.277359249" observedRunningTime="2025-12-03 07:45:26.79506256 +0000 UTC m=+1089.968419980" watchObservedRunningTime="2025-12-03 07:45:26.801209323 +0000 UTC m=+1089.974566723" Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.804044 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerStarted","Data":"e0badb7ce14b685f090002a5b47794913e81e794ec5d65f79e999e088a621874"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.809750 4612 generic.go:334] "Generic (PLEG): container finished" podID="5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" containerID="2817990cf1a9567157bc4783c7855485777f1e5b7e75a8f4cc6ac1daca9e0b60" exitCode=0 Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.809824 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" event={"ID":"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6","Type":"ContainerDied","Data":"2817990cf1a9567157bc4783c7855485777f1e5b7e75a8f4cc6ac1daca9e0b60"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.813701 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gc599" event={"ID":"3a0871c4-f877-4382-8458-cc41ff21f67d","Type":"ContainerStarted","Data":"46f45e268c333bcea38f9a80e4fbb4eb5f18dae0ed845ee802df9bea3df4c811"} 
Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.827370 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7b8b49c9-7g7fc" event={"ID":"d86bd30e-374a-4a76-be08-89a4e3310b61","Type":"ContainerStarted","Data":"c65fe726c0ddd6afb79eaeaea5df99ba600427760c53eb996ab41de05c679790"} Dec 03 07:45:26 crc kubenswrapper[4612]: I1203 07:45:26.864201 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gc599" podStartSLOduration=13.864171802 podStartE2EDuration="13.864171802s" podCreationTimestamp="2025-12-03 07:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:26.844649595 +0000 UTC m=+1090.018007005" watchObservedRunningTime="2025-12-03 07:45:26.864171802 +0000 UTC m=+1090.037529222" Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.879315 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7b8b49c9-7g7fc" event={"ID":"d86bd30e-374a-4a76-be08-89a4e3310b61","Type":"ContainerStarted","Data":"d79c8315f504eb471a52691b2d712bd066783dea976a9f562ceb01b81c95cbec"} Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.882588 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerStarted","Data":"3de0aa6accc09f941b04e9adde46501382d2a7d186b39ba220132f2a9ca15b37"} Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.884667 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.903289 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" event={"ID":"189d4587-3dd1-495a-9906-99f66131135c","Type":"ContainerStarted","Data":"6c17e4a097df9dc6bb89e1521728fe03320abde2188574082fdc471fb4aaf05b"} Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.904163 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.913038 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerStarted","Data":"f6d236d170ce6dec577c0f5c323df170cd9f609ae91ddf240e73b257852e47c3"} Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.931938 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-79dcc9984-j6bsp" podStartSLOduration=5.93191473 podStartE2EDuration="5.93191473s" podCreationTimestamp="2025-12-03 07:45:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:27.920993328 +0000 UTC m=+1091.094350748" watchObservedRunningTime="2025-12-03 07:45:27.93191473 +0000 UTC m=+1091.105272130" Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.934470 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerStarted","Data":"c797e95863d479c36f0667bacdc48947aee38f56fe99fa0ad04b4265532b5cb5"} Dec 03 07:45:27 crc kubenswrapper[4612]: I1203 07:45:27.949021 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" 
podStartSLOduration=5.949002826 podStartE2EDuration="5.949002826s" podCreationTimestamp="2025-12-03 07:45:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:27.946109244 +0000 UTC m=+1091.119466644" watchObservedRunningTime="2025-12-03 07:45:27.949002826 +0000 UTC m=+1091.122360236" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.040975 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.040935447 podStartE2EDuration="7.040935447s" podCreationTimestamp="2025-12-03 07:45:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:27.971369153 +0000 UTC m=+1091.144726573" watchObservedRunningTime="2025-12-03 07:45:28.040935447 +0000 UTC m=+1091.214292857" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.643378 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.667064 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume\") pod \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.667134 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume\") pod \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.667205 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2ss9\" (UniqueName: \"kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9\") pod \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\" (UID: \"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6\") " Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.669605 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume" (OuterVolumeSpecName: "config-volume") pod "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" (UID: "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.676137 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9" (OuterVolumeSpecName: "kube-api-access-w2ss9") pod "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" (UID: "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6"). InnerVolumeSpecName "kube-api-access-w2ss9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.676597 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" (UID: "5b731854-5aa4-4cfd-b4aa-ea210d24c3a6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.768723 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.768753 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.768766 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2ss9\" (UniqueName: \"kubernetes.io/projected/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6-kube-api-access-w2ss9\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.947888 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" event={"ID":"5b731854-5aa4-4cfd-b4aa-ea210d24c3a6","Type":"ContainerDied","Data":"a755e3dd482ce4d1dd42b67cb1f4c4843b5f6f4f52d3104f52695bfa6c24471a"} Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.948227 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a755e3dd482ce4d1dd42b67cb1f4c4843b5f6f4f52d3104f52695bfa6c24471a" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.948286 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg" Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.956110 4612 generic.go:334] "Generic (PLEG): container finished" podID="0600693c-3ac0-4d42-8efd-c3140c6474a8" containerID="34d6daa6f15b1c60dccf87c84047399bc6abe70b6604de7104cfcbaf26ab6442" exitCode=0 Dec 03 07:45:28 crc kubenswrapper[4612]: I1203 07:45:28.956749 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqqrz" event={"ID":"0600693c-3ac0-4d42-8efd-c3140c6474a8","Type":"ContainerDied","Data":"34d6daa6f15b1c60dccf87c84047399bc6abe70b6604de7104cfcbaf26ab6442"} Dec 03 07:45:29 crc kubenswrapper[4612]: I1203 07:45:29.966331 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7b8b49c9-7g7fc" event={"ID":"d86bd30e-374a-4a76-be08-89a4e3310b61","Type":"ContainerStarted","Data":"79b1e3ef8eb9e83a447bbcdf423ba3c3b569666d287fe76afbfc80f88d374cd1"} Dec 03 07:45:29 crc kubenswrapper[4612]: I1203 07:45:29.966677 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:29 crc kubenswrapper[4612]: I1203 07:45:29.967931 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerStarted","Data":"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b"} Dec 03 07:45:29 crc kubenswrapper[4612]: I1203 07:45:29.970455 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerStarted","Data":"c7fbfa80703b9a78a5c894b5a8f0fd108323def1d604f1ca94cc6327dd27d0b1"} Dec 03 07:45:29 crc kubenswrapper[4612]: I1203 07:45:29.999403 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f7b8b49c9-7g7fc" podStartSLOduration=5.999379569 podStartE2EDuration="5.999379569s" 
podCreationTimestamp="2025-12-03 07:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:29.997691467 +0000 UTC m=+1093.171048907" watchObservedRunningTime="2025-12-03 07:45:29.999379569 +0000 UTC m=+1093.172736969" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.033508 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.033484959 podStartE2EDuration="9.033484959s" podCreationTimestamp="2025-12-03 07:45:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:30.027377957 +0000 UTC m=+1093.200735377" watchObservedRunningTime="2025-12-03 07:45:30.033484959 +0000 UTC m=+1093.206842379" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.421409 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-pqqrz" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.599573 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle\") pod \"0600693c-3ac0-4d42-8efd-c3140c6474a8\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.600150 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs\") pod \"0600693c-3ac0-4d42-8efd-c3140c6474a8\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.600280 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk98f\" (UniqueName: \"kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f\") pod \"0600693c-3ac0-4d42-8efd-c3140c6474a8\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.600311 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data\") pod \"0600693c-3ac0-4d42-8efd-c3140c6474a8\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.600411 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts\") pod \"0600693c-3ac0-4d42-8efd-c3140c6474a8\" (UID: \"0600693c-3ac0-4d42-8efd-c3140c6474a8\") " Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.601124 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs" (OuterVolumeSpecName: "logs") pod "0600693c-3ac0-4d42-8efd-c3140c6474a8" (UID: "0600693c-3ac0-4d42-8efd-c3140c6474a8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.601390 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0600693c-3ac0-4d42-8efd-c3140c6474a8-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.606064 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts" (OuterVolumeSpecName: "scripts") pod "0600693c-3ac0-4d42-8efd-c3140c6474a8" (UID: "0600693c-3ac0-4d42-8efd-c3140c6474a8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.619198 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f" (OuterVolumeSpecName: "kube-api-access-lk98f") pod "0600693c-3ac0-4d42-8efd-c3140c6474a8" (UID: "0600693c-3ac0-4d42-8efd-c3140c6474a8"). InnerVolumeSpecName "kube-api-access-lk98f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.641068 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0600693c-3ac0-4d42-8efd-c3140c6474a8" (UID: "0600693c-3ac0-4d42-8efd-c3140c6474a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.671176 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data" (OuterVolumeSpecName: "config-data") pod "0600693c-3ac0-4d42-8efd-c3140c6474a8" (UID: "0600693c-3ac0-4d42-8efd-c3140c6474a8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.702521 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.702554 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk98f\" (UniqueName: \"kubernetes.io/projected/0600693c-3ac0-4d42-8efd-c3140c6474a8-kube-api-access-lk98f\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.702565 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.702575 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0600693c-3ac0-4d42-8efd-c3140c6474a8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.776419 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.776482 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.804123 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.804177 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.982706 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqqrz" event={"ID":"0600693c-3ac0-4d42-8efd-c3140c6474a8","Type":"ContainerDied","Data":"a7f1ea0b03956c55f1a2805e05d4d44d25588cd312228e18f4a0bcad3d595359"} Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.982758 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7f1ea0b03956c55f1a2805e05d4d44d25588cd312228e18f4a0bcad3d595359" Dec 03 07:45:30 crc kubenswrapper[4612]: I1203 07:45:30.982825 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqqrz" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.166198 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5cd4567696-hsfd9"] Dec 03 07:45:31 crc kubenswrapper[4612]: E1203 07:45:31.166521 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" containerName="collect-profiles" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.166537 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" containerName="collect-profiles" Dec 03 07:45:31 crc kubenswrapper[4612]: E1203 07:45:31.166557 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0600693c-3ac0-4d42-8efd-c3140c6474a8" containerName="placement-db-sync" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.166563 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0600693c-3ac0-4d42-8efd-c3140c6474a8" containerName="placement-db-sync" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.166723 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0600693c-3ac0-4d42-8efd-c3140c6474a8" containerName="placement-db-sync" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.166735 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" containerName="collect-profiles" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.167642 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.171319 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.171481 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.171584 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9sf96" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.171733 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.173428 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.194356 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cd4567696-hsfd9"] Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327117 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zptl\" (UniqueName: \"kubernetes.io/projected/1f9cee01-af98-4b34-b263-ae543c237e0b-kube-api-access-9zptl\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327179 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-config-data\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327206 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-combined-ca-bundle\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327349 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-scripts\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327584 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-internal-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327658 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9cee01-af98-4b34-b263-ae543c237e0b-logs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.327849 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-public-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429385 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9cee01-af98-4b34-b263-ae543c237e0b-logs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429483 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-public-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429522 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zptl\" (UniqueName: \"kubernetes.io/projected/1f9cee01-af98-4b34-b263-ae543c237e0b-kube-api-access-9zptl\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429572 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-config-data\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429598 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-combined-ca-bundle\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429632 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-scripts\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.429687 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-internal-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.430292 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9cee01-af98-4b34-b263-ae543c237e0b-logs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.432821 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-internal-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.433684 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-config-data\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.433917 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-public-tls-certs\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.438559 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-combined-ca-bundle\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.440203 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9cee01-af98-4b34-b263-ae543c237e0b-scripts\") pod \"placement-5cd4567696-hsfd9\" (UID: \"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.452329 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zptl\" (UniqueName: \"kubernetes.io/projected/1f9cee01-af98-4b34-b263-ae543c237e0b-kube-api-access-9zptl\") pod \"placement-5cd4567696-hsfd9\" (UID: 
\"1f9cee01-af98-4b34-b263-ae543c237e0b\") " pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:31 crc kubenswrapper[4612]: I1203 07:45:31.493802 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.140501 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.140852 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.229305 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.229370 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.359399 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.360450 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.361756 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.364512 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.895250 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cd4567696-hsfd9"] Dec 03 07:45:32 crc kubenswrapper[4612]: W1203 07:45:32.904680 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f9cee01_af98_4b34_b263_ae543c237e0b.slice/crio-ba3d1b8ced6f43f29af54d8c060530afa7b92cefdf22c2f05465af5afa3d94f2 WatchSource:0}: Error finding container ba3d1b8ced6f43f29af54d8c060530afa7b92cefdf22c2f05465af5afa3d94f2: Status 404 returned error can't find the container with id ba3d1b8ced6f43f29af54d8c060530afa7b92cefdf22c2f05465af5afa3d94f2 Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.915081 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.992586 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:45:32 crc kubenswrapper[4612]: I1203 07:45:32.992816 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="dnsmasq-dns" containerID="cri-o://b276ec1289350e95461371f20320dba8c5b7f3c490d23b097491b6a923a3fa1e" gracePeriod=10 Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.014859 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cd4567696-hsfd9" event={"ID":"1f9cee01-af98-4b34-b263-ae543c237e0b","Type":"ContainerStarted","Data":"ba3d1b8ced6f43f29af54d8c060530afa7b92cefdf22c2f05465af5afa3d94f2"} Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.014899 4612 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.026469 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.026504 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.027434 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:45:33 crc kubenswrapper[4612]: I1203 07:45:33.574280 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.137:5353: connect: connection refused" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.101454 4612 generic.go:334] "Generic (PLEG): container finished" podID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerID="b276ec1289350e95461371f20320dba8c5b7f3c490d23b097491b6a923a3fa1e" exitCode=0 Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.101824 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" event={"ID":"870f8c82-2018-4c5a-ad52-5066ef6211ad","Type":"ContainerDied","Data":"b276ec1289350e95461371f20320dba8c5b7f3c490d23b097491b6a923a3fa1e"} Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.153671 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cd4567696-hsfd9" event={"ID":"1f9cee01-af98-4b34-b263-ae543c237e0b","Type":"ContainerStarted","Data":"f7fbaab452e40587bfd8677d0342da894380964122bdacab93419aa22e6c2b16"} Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.153987 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cd4567696-hsfd9" event={"ID":"1f9cee01-af98-4b34-b263-ae543c237e0b","Type":"ContainerStarted","Data":"93465651056de48cbb9b5c841cb4acf8c3a28c4a1c907607275d2dae602bc8d2"} Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.212289 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5cd4567696-hsfd9" podStartSLOduration=3.212262722 podStartE2EDuration="3.212262722s" podCreationTimestamp="2025-12-03 07:45:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:34.188608793 +0000 UTC m=+1097.361966203" watchObservedRunningTime="2025-12-03 07:45:34.212262722 +0000 UTC m=+1097.385620132" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.232205 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.289724 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.289787 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.289830 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.289949 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk7m8\" (UniqueName: \"kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.290013 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.290086 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb\") pod \"870f8c82-2018-4c5a-ad52-5066ef6211ad\" (UID: \"870f8c82-2018-4c5a-ad52-5066ef6211ad\") " Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.342905 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8" (OuterVolumeSpecName: "kube-api-access-kk7m8") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "kube-api-access-kk7m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.398924 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.406923 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.407057 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk7m8\" (UniqueName: \"kubernetes.io/projected/870f8c82-2018-4c5a-ad52-5066ef6211ad-kube-api-access-kk7m8\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.420012 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.429062 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.451526 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config" (OuterVolumeSpecName: "config") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.458710 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "870f8c82-2018-4c5a-ad52-5066ef6211ad" (UID: "870f8c82-2018-4c5a-ad52-5066ef6211ad"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.508475 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.508515 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.508529 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:34 crc kubenswrapper[4612]: I1203 07:45:34.508541 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/870f8c82-2018-4c5a-ad52-5066ef6211ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.164631 4612 generic.go:334] "Generic (PLEG): container finished" podID="3a0871c4-f877-4382-8458-cc41ff21f67d" containerID="46f45e268c333bcea38f9a80e4fbb4eb5f18dae0ed845ee802df9bea3df4c811" exitCode=0 Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.164722 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gc599" event={"ID":"3a0871c4-f877-4382-8458-cc41ff21f67d","Type":"ContainerDied","Data":"46f45e268c333bcea38f9a80e4fbb4eb5f18dae0ed845ee802df9bea3df4c811"} Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.169709 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-ptq9j" event={"ID":"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff","Type":"ContainerStarted","Data":"049fb7ab852cbc63d04cc162f819fc8f83c2e03c36871c3487b835ada5813e8f"} Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.174865 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175146 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" event={"ID":"870f8c82-2018-4c5a-ad52-5066ef6211ad","Type":"ContainerDied","Data":"e3c750b0332d764954268e63166efc3eb4718a67fb3ec72f0bd6494bb1ddafeb"} Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175202 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175219 4612 scope.go:117] "RemoveContainer" containerID="b276ec1289350e95461371f20320dba8c5b7f3c490d23b097491b6a923a3fa1e" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175271 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175222 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175336 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-mhkbf" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.175841 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.213210 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-ptq9j" podStartSLOduration=2.982781107 podStartE2EDuration="47.213192145s" podCreationTimestamp="2025-12-03 07:44:48 +0000 UTC" firstStartedPulling="2025-12-03 07:44:50.445893488 +0000 UTC m=+1053.619250888" lastFinishedPulling="2025-12-03 07:45:34.676304526 +0000 UTC m=+1097.849661926" observedRunningTime="2025-12-03 07:45:35.20254352 +0000 UTC m=+1098.375900920" watchObservedRunningTime="2025-12-03 07:45:35.213192145 +0000 UTC m=+1098.386549545" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.223099 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.233901 4612 scope.go:117] "RemoveContainer" containerID="3a062c566bfd12e28457e85363de64510aec37db09ccc863d19f02c76647b169" Dec 03 07:45:35 crc kubenswrapper[4612]: I1203 07:45:35.235686 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-mhkbf"] Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.802732 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.803362 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.817576 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.817697 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.821358 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.825691 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.845856 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954692 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv8qf\" (UniqueName: \"kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954775 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954861 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954882 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954901 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.954930 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts\") pod \"3a0871c4-f877-4382-8458-cc41ff21f67d\" (UID: \"3a0871c4-f877-4382-8458-cc41ff21f67d\") " Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.971379 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts" (OuterVolumeSpecName: "scripts") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:36 crc kubenswrapper[4612]: I1203 07:45:36.985703 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.025069 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.025116 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf" (OuterVolumeSpecName: "kube-api-access-vv8qf") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "kube-api-access-vv8qf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.072129 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.073177 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.073207 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv8qf\" (UniqueName: \"kubernetes.io/projected/3a0871c4-f877-4382-8458-cc41ff21f67d-kube-api-access-vv8qf\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.073219 4612 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.073228 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.073237 4612 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.117143 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data" (OuterVolumeSpecName: "config-data") pod "3a0871c4-f877-4382-8458-cc41ff21f67d" (UID: "3a0871c4-f877-4382-8458-cc41ff21f67d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.119487 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" path="/var/lib/kubelet/pods/870f8c82-2018-4c5a-ad52-5066ef6211ad/volumes" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.176028 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0871c4-f877-4382-8458-cc41ff21f67d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.256686 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gc599" event={"ID":"3a0871c4-f877-4382-8458-cc41ff21f67d","Type":"ContainerDied","Data":"964f21d2083a9efca7fbfaefc75ebc2e3ccbd39df125bdbd992bc72d28886f71"} Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.256734 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="964f21d2083a9efca7fbfaefc75ebc2e3ccbd39df125bdbd992bc72d28886f71" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.256612 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gc599" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348029 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-664f766786-5w95f"] Dec 03 07:45:37 crc kubenswrapper[4612]: E1203 07:45:37.348662 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="dnsmasq-dns" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348680 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="dnsmasq-dns" Dec 03 07:45:37 crc kubenswrapper[4612]: E1203 07:45:37.348698 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0871c4-f877-4382-8458-cc41ff21f67d" containerName="keystone-bootstrap" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348706 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0871c4-f877-4382-8458-cc41ff21f67d" containerName="keystone-bootstrap" Dec 03 07:45:37 crc kubenswrapper[4612]: E1203 07:45:37.348712 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="init" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348719 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="init" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348893 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a0871c4-f877-4382-8458-cc41ff21f67d" containerName="keystone-bootstrap" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.348914 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="870f8c82-2018-4c5a-ad52-5066ef6211ad" containerName="dnsmasq-dns" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.349533 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.354476 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.354754 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m29rd" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.354896 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.355704 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-664f766786-5w95f"] Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.356093 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.356096 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.361287 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484370 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-combined-ca-bundle\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484474 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-internal-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484496 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-config-data\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484515 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-fernet-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484536 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-public-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmw5r\" (UniqueName: \"kubernetes.io/projected/0b3c004c-b094-4f83-b5c0-35fd59313980-kube-api-access-tmw5r\") pod \"keystone-664f766786-5w95f\" 
(UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484588 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-credential-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.484614 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-scripts\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.589953 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-internal-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590020 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-config-data\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590042 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-fernet-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590066 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-public-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590106 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmw5r\" (UniqueName: \"kubernetes.io/projected/0b3c004c-b094-4f83-b5c0-35fd59313980-kube-api-access-tmw5r\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590130 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-credential-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590159 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-scripts\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 
07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.590198 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-combined-ca-bundle\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.603597 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-public-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.604828 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-credential-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.604833 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-fernet-keys\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.606464 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-config-data\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.606666 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-internal-tls-certs\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.608318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-scripts\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.615852 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b3c004c-b094-4f83-b5c0-35fd59313980-combined-ca-bundle\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.617232 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmw5r\" (UniqueName: \"kubernetes.io/projected/0b3c004c-b094-4f83-b5c0-35fd59313980-kube-api-access-tmw5r\") pod \"keystone-664f766786-5w95f\" (UID: \"0b3c004c-b094-4f83-b5c0-35fd59313980\") " pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:37 crc kubenswrapper[4612]: I1203 07:45:37.669384 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:40 crc kubenswrapper[4612]: I1203 07:45:40.778245 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:45:40 crc kubenswrapper[4612]: I1203 07:45:40.806352 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 03 07:45:41 crc kubenswrapper[4612]: I1203 07:45:41.315806 4612 generic.go:334] "Generic (PLEG): container finished" podID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" containerID="049fb7ab852cbc63d04cc162f819fc8f83c2e03c36871c3487b835ada5813e8f" exitCode=0 Dec 03 07:45:41 crc kubenswrapper[4612]: I1203 07:45:41.315879 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-ptq9j" event={"ID":"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff","Type":"ContainerDied","Data":"049fb7ab852cbc63d04cc162f819fc8f83c2e03c36871c3487b835ada5813e8f"} Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.808854 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-664f766786-5w95f"] Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.809715 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.915432 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j4vj\" (UniqueName: \"kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj\") pod \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.915568 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data\") pod \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.915595 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle\") pod \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\" (UID: \"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff\") " Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.919826 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj" (OuterVolumeSpecName: "kube-api-access-2j4vj") pod "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" (UID: "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff"). InnerVolumeSpecName "kube-api-access-2j4vj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.922049 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" (UID: "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:42 crc kubenswrapper[4612]: I1203 07:45:42.949144 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" (UID: "2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.017991 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j4vj\" (UniqueName: \"kubernetes.io/projected/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-kube-api-access-2j4vj\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.018021 4612 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.018033 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.337472 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerStarted","Data":"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9"} Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.339390 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-ptq9j" event={"ID":"2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff","Type":"ContainerDied","Data":"95424c55a2378c812ba53868d286f36cd406538b31d34703d26a7483a3a95182"} Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.339426 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95424c55a2378c812ba53868d286f36cd406538b31d34703d26a7483a3a95182" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.339490 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-ptq9j" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.354282 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-664f766786-5w95f" event={"ID":"0b3c004c-b094-4f83-b5c0-35fd59313980","Type":"ContainerStarted","Data":"4c8c894b244da46ef467aba6f286c1e270c5a21949c793c71cdabbda81c86cb3"} Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.354325 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-664f766786-5w95f" event={"ID":"0b3c004c-b094-4f83-b5c0-35fd59313980","Type":"ContainerStarted","Data":"d0322d395d8db53edaf45ceaa33632c984b308e65b1fbed4600a0f213bf94dd6"} Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.354972 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-664f766786-5w95f" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.361727 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nvxkr" event={"ID":"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08","Type":"ContainerStarted","Data":"8b96aee156c0cafacba42f1fdda0703299e6d91b25e2330e57d7bba7e1e49e95"} Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.379560 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-664f766786-5w95f" podStartSLOduration=6.379542826 podStartE2EDuration="6.379542826s" podCreationTimestamp="2025-12-03 07:45:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:43.37290619 +0000 UTC m=+1106.546263590" watchObservedRunningTime="2025-12-03 07:45:43.379542826 +0000 UTC m=+1106.552900226" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.397727 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-nvxkr" podStartSLOduration=3.3562455829999998 podStartE2EDuration="56.397706358s" podCreationTimestamp="2025-12-03 07:44:47 +0000 UTC" firstStartedPulling="2025-12-03 07:44:49.239861824 +0000 UTC m=+1052.413219224" lastFinishedPulling="2025-12-03 07:45:42.281322599 +0000 UTC m=+1105.454679999" observedRunningTime="2025-12-03 07:45:43.397148375 +0000 UTC m=+1106.570505775" watchObservedRunningTime="2025-12-03 07:45:43.397706358 +0000 UTC m=+1106.571063768" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.598077 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-f87f554fd-p5qhz"] Dec 03 07:45:43 crc kubenswrapper[4612]: E1203 07:45:43.598529 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" containerName="barbican-db-sync" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.598550 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" containerName="barbican-db-sync" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.598790 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" containerName="barbican-db-sync" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.599893 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.605677 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6c9bf5d979-4pmzw"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.606708 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-p7657" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.607051 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.610021 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627693 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data-custom\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627733 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627784 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data-custom\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627813 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627851 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj2rj\" (UniqueName: \"kubernetes.io/projected/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-kube-api-access-sj2rj\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627870 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-combined-ca-bundle\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627896 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fljn\" (UniqueName: 
\"kubernetes.io/projected/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-kube-api-access-5fljn\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627920 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-logs\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627943 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-logs\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.627987 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-combined-ca-bundle\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.646797 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9bf5d979-4pmzw"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.650739 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.650980 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.665564 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-f87f554fd-p5qhz"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.705999 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.707463 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.716082 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.729433 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.729475 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.729511 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735599 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735713 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735744 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data-custom\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735815 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szwkf\" (UniqueName: \"kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735871 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735888 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj2rj\" (UniqueName: \"kubernetes.io/projected/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-kube-api-access-sj2rj\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.735919 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-combined-ca-bundle\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.742974 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743066 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fljn\" (UniqueName: \"kubernetes.io/projected/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-kube-api-access-5fljn\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743115 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-logs\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743170 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-logs\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743191 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743247 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-combined-ca-bundle\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.743332 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data-custom\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " 
pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.752174 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-logs\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.752411 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-logs\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.759521 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data-custom\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.762365 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-config-data-custom\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.764370 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-combined-ca-bundle\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.770621 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-combined-ca-bundle\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.778664 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-config-data\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: \"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.794675 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fljn\" (UniqueName: \"kubernetes.io/projected/4d772b9f-b6df-4ca8-8a76-f28285eef6b9-kube-api-access-5fljn\") pod \"barbican-keystone-listener-f87f554fd-p5qhz\" (UID: \"4d772b9f-b6df-4ca8-8a76-f28285eef6b9\") " pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.821363 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj2rj\" (UniqueName: \"kubernetes.io/projected/9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16-kube-api-access-sj2rj\") pod \"barbican-worker-6c9bf5d979-4pmzw\" (UID: 
\"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16\") " pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.871891 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.872120 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.872205 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.872236 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.872338 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szwkf\" (UniqueName: \"kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.872412 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.873701 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.874518 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.875212 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 
03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.875887 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.876586 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.905914 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.915307 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.920217 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.927859 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.939084 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szwkf\" (UniqueName: \"kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf\") pod \"dnsmasq-dns-688c87cc99-pmd6s\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.954746 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.977635 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.979087 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.979241 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qkhs\" (UniqueName: \"kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.979358 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.979428 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:43 crc kubenswrapper[4612]: I1203 07:45:43.979620 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.087842 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.088136 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qkhs\" (UniqueName: \"kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.088277 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.088374 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.088534 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.092411 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.094238 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.105903 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.150713 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qkhs\" (UniqueName: \"kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.151406 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data\") pod \"barbican-api-5dbdff7bb-vqj8s\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.199906 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.313614 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.691893 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9bf5d979-4pmzw"] Dec 03 07:45:44 crc kubenswrapper[4612]: I1203 07:45:44.834637 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-f87f554fd-p5qhz"] Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.017148 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:45:45 crc kubenswrapper[4612]: W1203 07:45:45.031247 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a7ad504_0ba1_4a87_9197_7c7e6d934273.slice/crio-a1defcecded044816796432644585e734006570daffaee5ca50dc97bcd72718d WatchSource:0}: Error finding container a1defcecded044816796432644585e734006570daffaee5ca50dc97bcd72718d: Status 404 returned error can't find the container with id a1defcecded044816796432644585e734006570daffaee5ca50dc97bcd72718d Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.230835 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:45:45 crc kubenswrapper[4612]: W1203 07:45:45.241052 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72991430_d273_4967_a788_5dfcb67793e1.slice/crio-bcba1a0aedbf505610ecfecfa27a1699e19d5979ed1fe5be5f14eff2deba9d5f WatchSource:0}: Error finding container bcba1a0aedbf505610ecfecfa27a1699e19d5979ed1fe5be5f14eff2deba9d5f: Status 404 returned error can't find the container with id bcba1a0aedbf505610ecfecfa27a1699e19d5979ed1fe5be5f14eff2deba9d5f Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.465219 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerStarted","Data":"bcba1a0aedbf505610ecfecfa27a1699e19d5979ed1fe5be5f14eff2deba9d5f"} Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.479096 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" event={"ID":"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16","Type":"ContainerStarted","Data":"ea99b328f50b259621f3b7adc7402ac44cd9b62f11edd11268334a9b697b1e4b"} Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.491106 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" event={"ID":"4d772b9f-b6df-4ca8-8a76-f28285eef6b9","Type":"ContainerStarted","Data":"bf95c2652b63d43e9d7c1a910a2833a69ad7e21dbc17245d5d61a22f6c6ffe76"} Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.495160 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerStarted","Data":"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e"} Dec 03 07:45:45 crc kubenswrapper[4612]: I1203 07:45:45.495203 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerStarted","Data":"a1defcecded044816796432644585e734006570daffaee5ca50dc97bcd72718d"} Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.509340 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerStarted","Data":"c7581249d0c533a2c88fa5773da20cc6c8dcb1e5a78cd690d45cfec853d1d718"} Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.509903 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerStarted","Data":"5e6a60d48a26d5bb90bff792b975dc2dd087271754c4334dd3e9816843b76a44"} Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.509964 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.509990 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.512455 4612 generic.go:334] "Generic (PLEG): container finished" podID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerID="e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e" exitCode=0 Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.512495 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerDied","Data":"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e"} Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.512515 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerStarted","Data":"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c"} Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.514003 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.558874 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podStartSLOduration=3.558854172 podStartE2EDuration="3.558854172s" podCreationTimestamp="2025-12-03 07:45:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:46.533279995 +0000 UTC m=+1109.706637395" watchObservedRunningTime="2025-12-03 07:45:46.558854172 +0000 UTC m=+1109.732211572" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.564103 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" podStartSLOduration=3.564082333 podStartE2EDuration="3.564082333s" podCreationTimestamp="2025-12-03 07:45:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:46.553255853 +0000 UTC m=+1109.726613253" watchObservedRunningTime="2025-12-03 07:45:46.564082333 +0000 UTC m=+1109.737439733" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.814612 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5687f788c4-jknl6"] Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.829520 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.832830 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.833094 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.851062 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5687f788c4-jknl6"] Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.859896 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-combined-ca-bundle\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860008 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860084 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-logs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860140 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data-custom\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860174 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-public-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860230 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4qfv\" (UniqueName: \"kubernetes.io/projected/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-kube-api-access-p4qfv\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.860298 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-internal-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962119 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-internal-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962410 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-combined-ca-bundle\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962500 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962605 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-logs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962692 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data-custom\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962766 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-public-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.962860 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4qfv\" (UniqueName: \"kubernetes.io/projected/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-kube-api-access-p4qfv\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.963475 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-logs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.968827 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-public-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.969574 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-combined-ca-bundle\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.972210 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data-custom\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:46 crc kubenswrapper[4612]: I1203 07:45:46.977482 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-internal-tls-certs\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:47 crc kubenswrapper[4612]: I1203 07:45:47.001821 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-config-data\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:47 crc kubenswrapper[4612]: I1203 07:45:47.009804 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4qfv\" (UniqueName: \"kubernetes.io/projected/ad775971-28a0-4fd6-8e8b-e10e5a9c0c50-kube-api-access-p4qfv\") pod \"barbican-api-5687f788c4-jknl6\" (UID: \"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50\") " pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:47 crc kubenswrapper[4612]: I1203 07:45:47.135838 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:45:47 crc kubenswrapper[4612]: I1203 07:45:47.136204 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:45:47 crc kubenswrapper[4612]: I1203 07:45:47.215341 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.030940 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5687f788c4-jknl6"] Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.562252 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" event={"ID":"4d772b9f-b6df-4ca8-8a76-f28285eef6b9","Type":"ContainerStarted","Data":"b7f90bfc9b95bbfff65ddc08816100f824250e2e77b41704815c33833ff46d84"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.562714 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" event={"ID":"4d772b9f-b6df-4ca8-8a76-f28285eef6b9","Type":"ContainerStarted","Data":"fc8437e071a531d562957c9a9ccd7cb3302e39cabefa6fc59aa9504cb8c078c1"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.568232 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5687f788c4-jknl6" event={"ID":"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50","Type":"ContainerStarted","Data":"0c8d421d855bf90a2042115f1fac6daed2cec663a44e7ed8074deee67de26c7b"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.568301 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5687f788c4-jknl6" event={"ID":"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50","Type":"ContainerStarted","Data":"df1e31123c67d45e8c6c92a253ac20fd01ba72d1d7869756fc09e57bc3ef999f"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.571014 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" event={"ID":"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16","Type":"ContainerStarted","Data":"6dea707849960077857ab167ac50404ba359f990b17efa08dadca925c82bb1e4"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.571058 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" event={"ID":"9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16","Type":"ContainerStarted","Data":"a6682e2493a4cc357c28c9f6dff15707d9c819c878773ecf3491b6a93f0dae56"} Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.606384 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-f87f554fd-p5qhz" podStartSLOduration=2.982191432 podStartE2EDuration="6.606361064s" podCreationTimestamp="2025-12-03 07:45:43 +0000 UTC" firstStartedPulling="2025-12-03 07:45:44.848296727 +0000 UTC m=+1108.021654127" lastFinishedPulling="2025-12-03 07:45:48.472466359 +0000 UTC m=+1111.645823759" observedRunningTime="2025-12-03 07:45:49.582648153 +0000 UTC m=+1112.756005563" watchObservedRunningTime="2025-12-03 07:45:49.606361064 +0000 UTC m=+1112.779718484" Dec 03 07:45:49 crc kubenswrapper[4612]: I1203 07:45:49.627297 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6c9bf5d979-4pmzw" podStartSLOduration=2.874639582 podStartE2EDuration="6.627271936s" podCreationTimestamp="2025-12-03 07:45:43 +0000 UTC" firstStartedPulling="2025-12-03 07:45:44.719821035 +0000 UTC m=+1107.893178435" lastFinishedPulling="2025-12-03 07:45:48.472453389 +0000 UTC m=+1111.645810789" observedRunningTime="2025-12-03 07:45:49.601059622 +0000 UTC m=+1112.774417012" watchObservedRunningTime="2025-12-03 07:45:49.627271936 +0000 UTC m=+1112.800629336" Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.597956 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-5687f788c4-jknl6" event={"ID":"ad775971-28a0-4fd6-8e8b-e10e5a9c0c50","Type":"ContainerStarted","Data":"a70d4ea7a9ade35f157a72c72815cf1f63351d988e060ee8e8b0287c11cf8a6a"} Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.598602 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.598639 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.625051 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5687f788c4-jknl6" podStartSLOduration=4.625028919 podStartE2EDuration="4.625028919s" podCreationTimestamp="2025-12-03 07:45:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:45:50.6222491 +0000 UTC m=+1113.795606520" watchObservedRunningTime="2025-12-03 07:45:50.625028919 +0000 UTC m=+1113.798386319" Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.777092 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:45:50 crc kubenswrapper[4612]: I1203 07:45:50.805023 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 03 07:45:53 crc kubenswrapper[4612]: I1203 07:45:53.177706 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:45:53 crc kubenswrapper[4612]: I1203 07:45:53.626848 4612 generic.go:334] "Generic (PLEG): container finished" podID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" containerID="8b96aee156c0cafacba42f1fdda0703299e6d91b25e2330e57d7bba7e1e49e95" exitCode=0 Dec 03 07:45:53 crc kubenswrapper[4612]: I1203 07:45:53.627037 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nvxkr" event={"ID":"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08","Type":"ContainerDied","Data":"8b96aee156c0cafacba42f1fdda0703299e6d91b25e2330e57d7bba7e1e49e95"} Dec 03 07:45:53 crc kubenswrapper[4612]: I1203 07:45:53.970293 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.205505 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.267036 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.267279 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" podUID="189d4587-3dd1-495a-9906-99f66131135c" 
containerName="dnsmasq-dns" containerID="cri-o://6c17e4a097df9dc6bb89e1521728fe03320abde2188574082fdc471fb4aaf05b" gracePeriod=10 Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.329382 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.330082 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.652031 4612 generic.go:334] "Generic (PLEG): container finished" podID="189d4587-3dd1-495a-9906-99f66131135c" containerID="6c17e4a097df9dc6bb89e1521728fe03320abde2188574082fdc471fb4aaf05b" exitCode=0 Dec 03 07:45:54 crc kubenswrapper[4612]: I1203 07:45:54.652268 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" event={"ID":"189d4587-3dd1-495a-9906-99f66131135c","Type":"ContainerDied","Data":"6c17e4a097df9dc6bb89e1521728fe03320abde2188574082fdc471fb4aaf05b"} Dec 03 07:45:55 crc kubenswrapper[4612]: I1203 07:45:55.576494 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6f7b8b49c9-7g7fc" Dec 03 07:45:55 crc kubenswrapper[4612]: I1203 07:45:55.673025 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:45:55 crc kubenswrapper[4612]: I1203 07:45:55.673255 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-79dcc9984-j6bsp" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-api" containerID="cri-o://64aeab9d82e8506fbc885cd93e574aa85766e5145a0e7cdb5278933455055721" gracePeriod=30 Dec 03 07:45:55 crc kubenswrapper[4612]: I1203 07:45:55.673652 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-79dcc9984-j6bsp" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-httpd" containerID="cri-o://3de0aa6accc09f941b04e9adde46501382d2a7d186b39ba220132f2a9ca15b37" gracePeriod=30 Dec 03 07:45:56 crc kubenswrapper[4612]: I1203 07:45:56.708706 4612 generic.go:334] "Generic (PLEG): container finished" podID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerID="3de0aa6accc09f941b04e9adde46501382d2a7d186b39ba220132f2a9ca15b37" exitCode=0 Dec 03 07:45:56 crc kubenswrapper[4612]: I1203 07:45:56.708978 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerDied","Data":"3de0aa6accc09f941b04e9adde46501382d2a7d186b39ba220132f2a9ca15b37"} Dec 03 07:45:57 crc kubenswrapper[4612]: I1203 07:45:57.914271 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: connect: connection refused" Dec 03 07:46:00 crc kubenswrapper[4612]: E1203 07:46:00.529496 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Dec 03 07:46:00 crc kubenswrapper[4612]: E1203 07:46:00.529996 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bd8zc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(7e598a7a-6eed-4d39-9ffc-5e61f8251eb8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 07:46:00 crc kubenswrapper[4612]: E1203 07:46:00.531642 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.563523 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725463 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725633 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725630 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725693 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725761 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f84q\" (UniqueName: \"kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725780 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.725851 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data\") pod \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\" (UID: \"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08\") " Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.726174 4612 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.735211 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q" (OuterVolumeSpecName: "kube-api-access-6f84q") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). InnerVolumeSpecName "kube-api-access-6f84q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.739411 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.773092 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts" (OuterVolumeSpecName: "scripts") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.791284 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-central-agent" containerID="cri-o://a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689" gracePeriod=30 Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.791432 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nvxkr" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.791883 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nvxkr" event={"ID":"cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08","Type":"ContainerDied","Data":"6157f35f6cbb9454508ddda22cfdaaea57a0748227cd9e2ccbffffb564cff6a4"} Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.791916 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6157f35f6cbb9454508ddda22cfdaaea57a0748227cd9e2ccbffffb564cff6a4" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.792851 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.792918 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.793608 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01"} pod="openstack/horizon-767d79bd88-5spkc" containerMessage="Container horizon failed startup probe, will be restarted" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.793635 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" containerID="cri-o://4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01" gracePeriod=30 Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.794239 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="sg-core" 
containerID="cri-o://b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9" gracePeriod=30 Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.794346 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-notification-agent" containerID="cri-o://063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b" gracePeriod=30 Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.798742 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.828485 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f84q\" (UniqueName: \"kubernetes.io/projected/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-kube-api-access-6f84q\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.828514 4612 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.828527 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.876897 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.876990 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.879171 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74"} pod="openstack/horizon-55fc5c6c94-pjh5s" containerMessage="Container horizon failed startup probe, will be restarted" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.879204 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" containerID="cri-o://59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74" gracePeriod=30 Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.975254 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:00 crc kubenswrapper[4612]: I1203 07:46:00.994135 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data" (OuterVolumeSpecName: "config-data") pod "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" (UID: "cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.009398 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.045488 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.045519 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.147836 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2mcv\" (UniqueName: \"kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.147937 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.148095 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.148147 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.150680 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.150712 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc\") pod \"189d4587-3dd1-495a-9906-99f66131135c\" (UID: \"189d4587-3dd1-495a-9906-99f66131135c\") " Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.159812 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv" (OuterVolumeSpecName: "kube-api-access-j2mcv") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "kube-api-access-j2mcv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.249688 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.264034 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2mcv\" (UniqueName: \"kubernetes.io/projected/189d4587-3dd1-495a-9906-99f66131135c-kube-api-access-j2mcv\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.264062 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.289010 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.304321 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5687f788c4-jknl6" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.309333 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.318296 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.329879 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config" (OuterVolumeSpecName: "config") pod "189d4587-3dd1-495a-9906-99f66131135c" (UID: "189d4587-3dd1-495a-9906-99f66131135c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.368381 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.368414 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.368450 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.368464 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/189d4587-3dd1-495a-9906-99f66131135c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.403541 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.403761 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" containerID="cri-o://5e6a60d48a26d5bb90bff792b975dc2dd087271754c4334dd3e9816843b76a44" gracePeriod=30 Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.404108 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" containerID="cri-o://c7581249d0c533a2c88fa5773da20cc6c8dcb1e5a78cd690d45cfec853d1d718" gracePeriod=30 Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.412351 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.412458 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.412626 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.413120 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.840921 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:01 crc kubenswrapper[4612]: E1203 07:46:01.841402 4612 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" containerName="cinder-db-sync" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.841420 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" containerName="cinder-db-sync" Dec 03 07:46:01 crc kubenswrapper[4612]: E1203 07:46:01.841451 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="init" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.841458 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="init" Dec 03 07:46:01 crc kubenswrapper[4612]: E1203 07:46:01.841472 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="dnsmasq-dns" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.841480 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="dnsmasq-dns" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.841693 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" containerName="cinder-db-sync" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.841717 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="189d4587-3dd1-495a-9906-99f66131135c" containerName="dnsmasq-dns" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.842742 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.849441 4612 generic.go:334] "Generic (PLEG): container finished" podID="72991430-d273-4967-a788-5dfcb67793e1" containerID="5e6a60d48a26d5bb90bff792b975dc2dd087271754c4334dd3e9816843b76a44" exitCode=143 Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.852847 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.853033 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.853139 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7fvv9" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.853179 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerDied","Data":"5e6a60d48a26d5bb90bff792b975dc2dd087271754c4334dd3e9816843b76a44"} Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.853241 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.856516 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877024 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppzdv\" (UniqueName: \"kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877096 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877119 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877195 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877209 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.877222 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.894326 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" event={"ID":"189d4587-3dd1-495a-9906-99f66131135c","Type":"ContainerDied","Data":"735c4826b008d0cce9697c8b7db3b4f6f292cf54edc95b0b1aadcf77f9d7a281"} Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.894379 4612 scope.go:117] "RemoveContainer" containerID="6c17e4a097df9dc6bb89e1521728fe03320abde2188574082fdc471fb4aaf05b" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.894513 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-j7p7n" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.907384 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"] Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.909680 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.925229 4612 generic.go:334] "Generic (PLEG): container finished" podID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerID="b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9" exitCode=2 Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.925271 4612 generic.go:334] "Generic (PLEG): container finished" podID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerID="a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689" exitCode=0 Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.926185 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerDied","Data":"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9"} Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.926219 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerDied","Data":"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689"} Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.945491 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"] Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.979531 4612 scope.go:117] "RemoveContainer" containerID="2d257237a7e4f024371b0bcbd2937d51660398ef14e8a8b712e4d65892f55d3c" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982330 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982360 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982382 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982426 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppzdv\" (UniqueName: \"kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982491 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.982515 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:01 crc kubenswrapper[4612]: I1203 07:46:01.983050 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.004197 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.015178 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.026846 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.027065 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.027659 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-j7p7n"] Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.029468 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.038591 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppzdv\" (UniqueName: \"kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv\") pod \"cinder-scheduler-0\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.102924 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.112956 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 
07:46:02.113933 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tptqk\" (UniqueName: \"kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.120394 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.120525 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.120744 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.194356 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.222870 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.223172 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.223307 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptqk\" (UniqueName: \"kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.223479 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.223583 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.223713 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.224838 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.225650 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.226147 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.226494 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.226663 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.247540 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tptqk\" (UniqueName: \"kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk\") pod \"dnsmasq-dns-6bb4fc677f-j4wbq\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.295409 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.345471 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.347064 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.367312 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.431411 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433111 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433320 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433418 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433499 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433586 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmpkd\" (UniqueName: \"kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.433690 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.449297 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.540277 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.540850 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.540985 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.541077 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmpkd\" (UniqueName: \"kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.541217 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.541369 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.541593 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.542641 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.545373 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.579764 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.580311 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmpkd\" (UniqueName: \"kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.580663 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.581904 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.592628 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.792192 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:02 crc kubenswrapper[4612]: I1203 07:46:02.977141 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:03 crc kubenswrapper[4612]: I1203 07:46:03.107484 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="189d4587-3dd1-495a-9906-99f66131135c" path="/var/lib/kubelet/pods/189d4587-3dd1-495a-9906-99f66131135c/volumes" Dec 03 07:46:03 crc kubenswrapper[4612]: I1203 07:46:03.207784 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"] Dec 03 07:46:03 crc kubenswrapper[4612]: I1203 07:46:03.456019 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.024767 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerStarted","Data":"fe6cc12b293e0a5360963f3a80fa80dc0ff28e0b5014ece131847ad87368e6bc"} Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.030241 4612 generic.go:334] "Generic (PLEG): container finished" podID="ac049611-199b-46af-ac88-42736bbf522d" containerID="3a4b20c303e1f22cc2642f57344a45df151bb467d5f19e0bc5e382d3a18c6c62" exitCode=0 Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.030337 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" event={"ID":"ac049611-199b-46af-ac88-42736bbf522d","Type":"ContainerDied","Data":"3a4b20c303e1f22cc2642f57344a45df151bb467d5f19e0bc5e382d3a18c6c62"} Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.030370 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" event={"ID":"ac049611-199b-46af-ac88-42736bbf522d","Type":"ContainerStarted","Data":"a8219a496466337c5da879b9e107a05b0712a5b8c1f38fd0e5d9a67170cdf7be"} Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.034924 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerStarted","Data":"65ee3cdbb67a905460044d3366d868f122418b6f39f6e6fbd562df7fdb3df1be"} Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.869354 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.897532 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.897882 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.897969 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.897992 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.898051 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.898085 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.898139 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd8zc\" (UniqueName: \"kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc\") pod \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\" (UID: \"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8\") " Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.899338 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.899799 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.905220 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts" (OuterVolumeSpecName: "scripts") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.911819 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc" (OuterVolumeSpecName: "kube-api-access-bd8zc") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "kube-api-access-bd8zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.970574 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.989038 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data" (OuterVolumeSpecName: "config-data") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.996359 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5cd4567696-hsfd9" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.999606 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd8zc\" (UniqueName: \"kubernetes.io/projected/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-kube-api-access-bd8zc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.999624 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.999634 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.999643 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:04 crc kubenswrapper[4612]: I1203 07:46:04.999651 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.031635 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.080307 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" (UID: "7e598a7a-6eed-4d39-9ffc-5e61f8251eb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.108685 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.108744 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.124271 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.124300 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" event={"ID":"ac049611-199b-46af-ac88-42736bbf522d","Type":"ContainerStarted","Data":"c13073aeadea899f842d6536a2fdbbdb1ff96e3954511c5249ed62939427b063"} Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.124314 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerStarted","Data":"fa2ae64e079baaf0be2fff78d798f036ab500dc474972f9db38538ef64a68109"} Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.138546 4612 generic.go:334] "Generic (PLEG): container finished" podID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerID="063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b" exitCode=0 Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.138595 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.138651 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerDied","Data":"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b"} Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.138676 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e598a7a-6eed-4d39-9ffc-5e61f8251eb8","Type":"ContainerDied","Data":"80eac5d153f2975edce2cf89aeeb857c078a15cd302880eda508e12f8c4b319f"} Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.138715 4612 scope.go:117] "RemoveContainer" containerID="b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.188730 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" podStartSLOduration=4.188711078 podStartE2EDuration="4.188711078s" podCreationTimestamp="2025-12-03 07:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:05.141527542 +0000 UTC m=+1128.314884952" watchObservedRunningTime="2025-12-03 07:46:05.188711078 +0000 UTC m=+1128.362068478" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.201127 4612 scope.go:117] "RemoveContainer" containerID="063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.371543 4612 scope.go:117] "RemoveContainer" containerID="a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.387022 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.406797 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.446994 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.447372 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="sg-core" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447383 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="sg-core" Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.447402 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-notification-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447409 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-notification-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.447427 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-central-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447433 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-central-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447597 4612 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="sg-core" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447611 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-central-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.447628 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" containerName="ceilometer-notification-agent" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.449189 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.460766 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.460992 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.467993 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.626238 4612 scope.go:117] "RemoveContainer" containerID="b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9" Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.630099 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9\": container with ID starting with b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9 not found: ID does not exist" containerID="b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.630144 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9"} err="failed to get container status \"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9\": rpc error: code = NotFound desc = could not find container \"b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9\": container with ID starting with b6589bf3050524be6c4383f178d87017265e99ae240485a258e90683cae9fab9 not found: ID does not exist" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.630169 4612 scope.go:117] "RemoveContainer" containerID="063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b" Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.633272 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b\": container with ID starting with 063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b not found: ID does not exist" containerID="063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.633316 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b"} err="failed to get container status \"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b\": rpc error: code = NotFound desc = could not find container \"063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b\": container with ID starting with 
063a61813af9666e725eaa514dfdbc404ef6acfb9ae4192c67ebfbecec10979b not found: ID does not exist" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.633346 4612 scope.go:117] "RemoveContainer" containerID="a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689" Dec 03 07:46:05 crc kubenswrapper[4612]: E1203 07:46:05.633680 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689\": container with ID starting with a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689 not found: ID does not exist" containerID="a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.633708 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689"} err="failed to get container status \"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689\": rpc error: code = NotFound desc = could not find container \"a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689\": container with ID starting with a2727bf28968388de34caf11446b8d61b4e0049a4216e4832a2211c2fad17689 not found: ID does not exist" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642220 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc9p4\" (UniqueName: \"kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642256 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642301 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642383 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642401 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642426 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " 
pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.642442 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.743847 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.743907 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.744010 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.744026 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.744086 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc9p4\" (UniqueName: \"kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.744111 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.744156 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.745060 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.745326 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " 
pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.750458 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.750910 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.762971 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.775100 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc9p4\" (UniqueName: \"kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.788691 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " pod="openstack/ceilometer-0" Dec 03 07:46:05 crc kubenswrapper[4612]: I1203 07:46:05.903743 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.257383 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerStarted","Data":"c64f317d2d090adae5e73d98d55037178a887e24d665767e000338670542bc3c"} Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.259127 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.271779 4612 generic.go:334] "Generic (PLEG): container finished" podID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerID="64aeab9d82e8506fbc885cd93e574aa85766e5145a0e7cdb5278933455055721" exitCode=0 Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.271875 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerDied","Data":"64aeab9d82e8506fbc885cd93e574aa85766e5145a0e7cdb5278933455055721"} Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.296188 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerStarted","Data":"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823"} Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.317144 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5687f788c4-jknl6" podUID="ad775971-28a0-4fd6-8e8b-e10e5a9c0c50" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.318314 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.350575 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.350551841 podStartE2EDuration="4.350551841s" podCreationTimestamp="2025-12-03 07:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:06.33123421 +0000 UTC m=+1129.504591620" watchObservedRunningTime="2025-12-03 07:46:06.350551841 +0000 UTC m=+1129.523909251" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.457594 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.620190 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:06 crc kubenswrapper[4612]: W1203 07:46:06.628355 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3d66811_da8a_485c_9dd8_092f29388b2a.slice/crio-0d497bb8ce0a45b0af6a30526b71f6af35b5fdcef51c4162e322a6dce5c54aa4 WatchSource:0}: Error finding container 0d497bb8ce0a45b0af6a30526b71f6af35b5fdcef51c4162e322a6dce5c54aa4: Status 404 returned error can't find the container with id 
0d497bb8ce0a45b0af6a30526b71f6af35b5fdcef51c4162e322a6dce5c54aa4 Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.774303 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.892851 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle\") pod \"871a775f-4f35-4128-8198-8bfa7df3ea61\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.893319 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config\") pod \"871a775f-4f35-4128-8198-8bfa7df3ea61\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.893399 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs\") pod \"871a775f-4f35-4128-8198-8bfa7df3ea61\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.893426 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw8wc\" (UniqueName: \"kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc\") pod \"871a775f-4f35-4128-8198-8bfa7df3ea61\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.893512 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config\") pod \"871a775f-4f35-4128-8198-8bfa7df3ea61\" (UID: \"871a775f-4f35-4128-8198-8bfa7df3ea61\") " Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.898234 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "871a775f-4f35-4128-8198-8bfa7df3ea61" (UID: "871a775f-4f35-4128-8198-8bfa7df3ea61"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.899844 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc" (OuterVolumeSpecName: "kube-api-access-sw8wc") pod "871a775f-4f35-4128-8198-8bfa7df3ea61" (UID: "871a775f-4f35-4128-8198-8bfa7df3ea61"). InnerVolumeSpecName "kube-api-access-sw8wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.974141 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config" (OuterVolumeSpecName: "config") pod "871a775f-4f35-4128-8198-8bfa7df3ea61" (UID: "871a775f-4f35-4128-8198-8bfa7df3ea61"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.980773 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "871a775f-4f35-4128-8198-8bfa7df3ea61" (UID: "871a775f-4f35-4128-8198-8bfa7df3ea61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.983460 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:57870->10.217.0.159:9311: read: connection reset by peer" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.983475 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:57850->10.217.0.159:9311: read: connection reset by peer" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.983546 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.983742 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dbdff7bb-vqj8s" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:57860->10.217.0.159:9311: read: connection reset by peer" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.995641 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.995781 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.995857 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw8wc\" (UniqueName: \"kubernetes.io/projected/871a775f-4f35-4128-8198-8bfa7df3ea61-kube-api-access-sw8wc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:06 crc kubenswrapper[4612]: I1203 07:46:06.995928 4612 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.023747 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "871a775f-4f35-4128-8198-8bfa7df3ea61" (UID: "871a775f-4f35-4128-8198-8bfa7df3ea61"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.097300 4612 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/871a775f-4f35-4128-8198-8bfa7df3ea61-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.102005 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e598a7a-6eed-4d39-9ffc-5e61f8251eb8" path="/var/lib/kubelet/pods/7e598a7a-6eed-4d39-9ffc-5e61f8251eb8/volumes" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.319104 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79dcc9984-j6bsp" event={"ID":"871a775f-4f35-4128-8198-8bfa7df3ea61","Type":"ContainerDied","Data":"ebb9b2e5a5f58b764116d34e3b0ce4c7b054e23f17257b40b47db13d70b5f869"} Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.319520 4612 scope.go:117] "RemoveContainer" containerID="3de0aa6accc09f941b04e9adde46501382d2a7d186b39ba220132f2a9ca15b37" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.319657 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79dcc9984-j6bsp" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.341291 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerStarted","Data":"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb"} Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.349713 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerStarted","Data":"0d497bb8ce0a45b0af6a30526b71f6af35b5fdcef51c4162e322a6dce5c54aa4"} Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.355613 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.356205 4612 generic.go:334] "Generic (PLEG): container finished" podID="72991430-d273-4967-a788-5dfcb67793e1" containerID="c7581249d0c533a2c88fa5773da20cc6c8dcb1e5a78cd690d45cfec853d1d718" exitCode=0 Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.356252 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerDied","Data":"c7581249d0c533a2c88fa5773da20cc6c8dcb1e5a78cd690d45cfec853d1d718"} Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.382110 4612 scope.go:117] "RemoveContainer" containerID="64aeab9d82e8506fbc885cd93e574aa85766e5145a0e7cdb5278933455055721" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.385248 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-79dcc9984-j6bsp"] Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.397284 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.327271612 podStartE2EDuration="6.397271745s" podCreationTimestamp="2025-12-03 07:46:01 +0000 UTC" firstStartedPulling="2025-12-03 07:46:03.010163661 +0000 UTC m=+1126.183521071" lastFinishedPulling="2025-12-03 07:46:04.080163804 +0000 UTC m=+1127.253521204" observedRunningTime="2025-12-03 07:46:07.376505438 +0000 UTC m=+1130.549862838" watchObservedRunningTime="2025-12-03 07:46:07.397271745 +0000 UTC m=+1130.570629145" Dec 03 07:46:07 crc 
kubenswrapper[4612]: I1203 07:46:07.408123 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.510751 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs\") pod \"72991430-d273-4967-a788-5dfcb67793e1\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.510813 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data\") pod \"72991430-d273-4967-a788-5dfcb67793e1\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.510872 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qkhs\" (UniqueName: \"kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs\") pod \"72991430-d273-4967-a788-5dfcb67793e1\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.510908 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle\") pod \"72991430-d273-4967-a788-5dfcb67793e1\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.510977 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom\") pod \"72991430-d273-4967-a788-5dfcb67793e1\" (UID: \"72991430-d273-4967-a788-5dfcb67793e1\") " Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.511914 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs" (OuterVolumeSpecName: "logs") pod "72991430-d273-4967-a788-5dfcb67793e1" (UID: "72991430-d273-4967-a788-5dfcb67793e1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.512225 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72991430-d273-4967-a788-5dfcb67793e1-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.522575 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "72991430-d273-4967-a788-5dfcb67793e1" (UID: "72991430-d273-4967-a788-5dfcb67793e1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.530384 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs" (OuterVolumeSpecName: "kube-api-access-6qkhs") pod "72991430-d273-4967-a788-5dfcb67793e1" (UID: "72991430-d273-4967-a788-5dfcb67793e1"). InnerVolumeSpecName "kube-api-access-6qkhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.592119 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72991430-d273-4967-a788-5dfcb67793e1" (UID: "72991430-d273-4967-a788-5dfcb67793e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.615666 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qkhs\" (UniqueName: \"kubernetes.io/projected/72991430-d273-4967-a788-5dfcb67793e1-kube-api-access-6qkhs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.615696 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.615706 4612 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.628852 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data" (OuterVolumeSpecName: "config-data") pod "72991430-d273-4967-a788-5dfcb67793e1" (UID: "72991430-d273-4967-a788-5dfcb67793e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:07 crc kubenswrapper[4612]: I1203 07:46:07.726262 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72991430-d273-4967-a788-5dfcb67793e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.366315 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerStarted","Data":"264f8814be5076dec578447aeb666a338405ee5d52a94879ef4636da6198ec20"} Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.366361 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerStarted","Data":"cf62adc9b2e9b0034859b41bece66812579f148487e9c68316b6888c4217cacd"} Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.368047 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dbdff7bb-vqj8s" event={"ID":"72991430-d273-4967-a788-5dfcb67793e1","Type":"ContainerDied","Data":"bcba1a0aedbf505610ecfecfa27a1699e19d5979ed1fe5be5f14eff2deba9d5f"} Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.368265 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dbdff7bb-vqj8s" Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.368315 4612 scope.go:117] "RemoveContainer" containerID="c7581249d0c533a2c88fa5773da20cc6c8dcb1e5a78cd690d45cfec853d1d718" Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.378253 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api-log" containerID="cri-o://fa2ae64e079baaf0be2fff78d798f036ab500dc474972f9db38538ef64a68109" gracePeriod=30 Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.378563 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api" containerID="cri-o://c64f317d2d090adae5e73d98d55037178a887e24d665767e000338670542bc3c" gracePeriod=30 Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.399243 4612 scope.go:117] "RemoveContainer" containerID="5e6a60d48a26d5bb90bff792b975dc2dd087271754c4334dd3e9816843b76a44" Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.402124 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:46:08 crc kubenswrapper[4612]: I1203 07:46:08.412769 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5dbdff7bb-vqj8s"] Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.102708 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72991430-d273-4967-a788-5dfcb67793e1" path="/var/lib/kubelet/pods/72991430-d273-4967-a788-5dfcb67793e1/volumes" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.103761 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" path="/var/lib/kubelet/pods/871a775f-4f35-4128-8198-8bfa7df3ea61/volumes" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.390606 4612 generic.go:334] "Generic (PLEG): container finished" podID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerID="c64f317d2d090adae5e73d98d55037178a887e24d665767e000338670542bc3c" exitCode=0 Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.390648 4612 generic.go:334] "Generic (PLEG): container finished" podID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerID="fa2ae64e079baaf0be2fff78d798f036ab500dc474972f9db38538ef64a68109" exitCode=143 Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.390700 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerDied","Data":"c64f317d2d090adae5e73d98d55037178a887e24d665767e000338670542bc3c"} Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.390732 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerDied","Data":"fa2ae64e079baaf0be2fff78d798f036ab500dc474972f9db38538ef64a68109"} Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.393655 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerStarted","Data":"23e6de29c7fb5a16a4445db935ed2bc8c94e0e529ca22f31bb6cfd5b6997c820"} Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.400154 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559113 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559391 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmpkd\" (UniqueName: \"kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559429 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559449 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559548 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.559890 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs" (OuterVolumeSpecName: "logs") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.560176 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.560218 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id\") pod \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\" (UID: \"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd\") " Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.560520 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.560727 4612 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.560741 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.567100 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts" (OuterVolumeSpecName: "scripts") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.571133 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.579089 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd" (OuterVolumeSpecName: "kube-api-access-kmpkd") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "kube-api-access-kmpkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.593585 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.618062 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data" (OuterVolumeSpecName: "config-data") pod "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" (UID: "654f18cf-23ef-49bc-bdeb-91bc1e68f4fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.662382 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.662415 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmpkd\" (UniqueName: \"kubernetes.io/projected/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-kube-api-access-kmpkd\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.662427 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.662438 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:09 crc kubenswrapper[4612]: I1203 07:46:09.662451 4612 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.359760 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-664f766786-5w95f" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.414446 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"654f18cf-23ef-49bc-bdeb-91bc1e68f4fd","Type":"ContainerDied","Data":"65ee3cdbb67a905460044d3366d868f122418b6f39f6e6fbd562df7fdb3df1be"} Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.414501 4612 scope.go:117] "RemoveContainer" containerID="c64f317d2d090adae5e73d98d55037178a887e24d665767e000338670542bc3c" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.414512 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.426044 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerStarted","Data":"74f8e78493cd009de97ef69e3650a494f6b07b27b81cef8b588f3b84ce06ec6f"} Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.427368 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.463263 4612 scope.go:117] "RemoveContainer" containerID="fa2ae64e079baaf0be2fff78d798f036ab500dc474972f9db38538ef64a68109" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.485701 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.1463158509999998 podStartE2EDuration="5.485667466s" podCreationTimestamp="2025-12-03 07:46:05 +0000 UTC" firstStartedPulling="2025-12-03 07:46:06.630729963 +0000 UTC m=+1129.804087353" lastFinishedPulling="2025-12-03 07:46:09.970081568 +0000 UTC m=+1133.143438968" observedRunningTime="2025-12-03 07:46:10.453822842 +0000 UTC m=+1133.627180262" watchObservedRunningTime="2025-12-03 07:46:10.485667466 +0000 UTC m=+1133.659024876" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.551570 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.563159 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.575374 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.575850 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.575933 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api" Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.576052 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-httpd" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.576119 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-httpd" Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.576201 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.576276 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-api" Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.576359 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api-log" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.576431 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api-log" Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.576512 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" Dec 03 07:46:10 crc 
kubenswrapper[4612]: I1203 07:46:10.576582 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" Dec 03 07:46:10 crc kubenswrapper[4612]: E1203 07:46:10.576662 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.576738 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577094 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577195 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577278 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" containerName="cinder-api-log" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577359 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-api" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577434 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="871a775f-4f35-4128-8198-8bfa7df3ea61" containerName="neutron-httpd" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.577523 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="72991430-d273-4967-a788-5dfcb67793e1" containerName="barbican-api-log" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.578811 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.581938 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.582491 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.585810 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.603356 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685371 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685569 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-logs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685613 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685751 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gdlc\" (UniqueName: \"kubernetes.io/projected/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-kube-api-access-6gdlc\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685817 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data-custom\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685863 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685908 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.685986 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-scripts\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.686055 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787610 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787670 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787702 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-scripts\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787728 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787780 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787820 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-logs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787837 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787880 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gdlc\" (UniqueName: \"kubernetes.io/projected/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-kube-api-access-6gdlc\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.787908 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data-custom\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.788917 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.789410 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-logs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.797390 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.799262 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.799808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data-custom\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.803498 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.803989 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-scripts\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.809395 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gdlc\" (UniqueName: \"kubernetes.io/projected/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-kube-api-access-6gdlc\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.822180 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/751ad1b4-cd3c-4616-99ed-9b19fee06ae8-config-data\") pod \"cinder-api-0\" (UID: \"751ad1b4-cd3c-4616-99ed-9b19fee06ae8\") " pod="openstack/cinder-api-0" Dec 03 07:46:10 crc kubenswrapper[4612]: I1203 07:46:10.923910 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 07:46:11 crc kubenswrapper[4612]: I1203 07:46:11.101660 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="654f18cf-23ef-49bc-bdeb-91bc1e68f4fd" path="/var/lib/kubelet/pods/654f18cf-23ef-49bc-bdeb-91bc1e68f4fd/volumes" Dec 03 07:46:11 crc kubenswrapper[4612]: I1203 07:46:11.498842 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.195631 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.298385 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.377652 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.378162 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="dnsmasq-dns" containerID="cri-o://8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c" gracePeriod=10 Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.476426 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"751ad1b4-cd3c-4616-99ed-9b19fee06ae8","Type":"ContainerStarted","Data":"f7446990854f85872bfa8d4d6a391f05c3991e2cd7dedf7c435d63409b297a96"} Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.476463 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"751ad1b4-cd3c-4616-99ed-9b19fee06ae8","Type":"ContainerStarted","Data":"5a1ade0eeec25970e10567dc5cda28c82ec7f9eff8dd8001ca3814299860dcf1"} Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.659877 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 07:46:12 crc kubenswrapper[4612]: I1203 07:46:12.737699 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.072924 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126003 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126096 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126149 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szwkf\" (UniqueName: \"kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126231 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126354 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.126408 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc\") pod \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\" (UID: \"8a7ad504-0ba1-4a87-9197-7c7e6d934273\") " Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.150230 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf" (OuterVolumeSpecName: "kube-api-access-szwkf") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "kube-api-access-szwkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.223561 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.228570 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szwkf\" (UniqueName: \"kubernetes.io/projected/8a7ad504-0ba1-4a87-9197-7c7e6d934273-kube-api-access-szwkf\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.228691 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.243403 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config" (OuterVolumeSpecName: "config") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.273655 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.274409 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.307618 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8a7ad504-0ba1-4a87-9197-7c7e6d934273" (UID: "8a7ad504-0ba1-4a87-9197-7c7e6d934273"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.330700 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.330732 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.330741 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.330751 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7ad504-0ba1-4a87-9197-7c7e6d934273-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.488367 4612 generic.go:334] "Generic (PLEG): container finished" podID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerID="8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c" exitCode=0 Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.488437 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.488436 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerDied","Data":"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c"} Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.488515 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pmd6s" event={"ID":"8a7ad504-0ba1-4a87-9197-7c7e6d934273","Type":"ContainerDied","Data":"a1defcecded044816796432644585e734006570daffaee5ca50dc97bcd72718d"} Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.488541 4612 scope.go:117] "RemoveContainer" containerID="8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.491098 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"751ad1b4-cd3c-4616-99ed-9b19fee06ae8","Type":"ContainerStarted","Data":"414229544e971e852d6a0340245ce6f8d4950927062aef429965e0d48f179e34"} Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.491330 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="cinder-scheduler" containerID="cri-o://d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823" gracePeriod=30 Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.492383 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="probe" containerID="cri-o://2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb" gracePeriod=30 Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.552679 4612 scope.go:117] "RemoveContainer" containerID="e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e" Dec 03 
07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.566210 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.566188101 podStartE2EDuration="3.566188101s" podCreationTimestamp="2025-12-03 07:46:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:13.531274661 +0000 UTC m=+1136.704632071" watchObservedRunningTime="2025-12-03 07:46:13.566188101 +0000 UTC m=+1136.739545501" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.583582 4612 scope.go:117] "RemoveContainer" containerID="8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.584882 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:46:13 crc kubenswrapper[4612]: E1203 07:46:13.591333 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c\": container with ID starting with 8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c not found: ID does not exist" containerID="8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.591393 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c"} err="failed to get container status \"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c\": rpc error: code = NotFound desc = could not find container \"8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c\": container with ID starting with 8f7cb05b9d3af49770849001c36d685e805846608b1cf7f3b55a96ba7a7ff48c not found: ID does not exist" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.591422 4612 scope.go:117] "RemoveContainer" containerID="e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e" Dec 03 07:46:13 crc kubenswrapper[4612]: E1203 07:46:13.591722 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e\": container with ID starting with e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e not found: ID does not exist" containerID="e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.591743 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e"} err="failed to get container status \"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e\": rpc error: code = NotFound desc = could not find container \"e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e\": container with ID starting with e5ce43d54b8a23e3e9fdba4205120fc7011d044342118a8dc979ecae3b7fad7e not found: ID does not exist" Dec 03 07:46:13 crc kubenswrapper[4612]: I1203 07:46:13.603725 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pmd6s"] Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.502860 4612 generic.go:334] "Generic (PLEG): container finished" podID="ed3f4a53-f059-46df-999b-53a3e0ae385b" 
containerID="2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb" exitCode=0 Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.503822 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerDied","Data":"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb"} Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.503869 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.622316 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 07:46:14 crc kubenswrapper[4612]: E1203 07:46:14.622658 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="dnsmasq-dns" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.622674 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="dnsmasq-dns" Dec 03 07:46:14 crc kubenswrapper[4612]: E1203 07:46:14.622701 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="init" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.622708 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="init" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.622867 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" containerName="dnsmasq-dns" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.623482 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.628335 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.628489 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.629501 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-j6dxd" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.641545 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.653966 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.654019 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.654038 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.654076 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqv6x\" (UniqueName: \"kubernetes.io/projected/6b63827f-76e8-454f-9243-6c05f9e3c2fd-kube-api-access-sqv6x\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.755453 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.755512 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.755536 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.755587 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqv6x\" (UniqueName: \"kubernetes.io/projected/6b63827f-76e8-454f-9243-6c05f9e3c2fd-kube-api-access-sqv6x\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.757824 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.762254 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.765568 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63827f-76e8-454f-9243-6c05f9e3c2fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 crc kubenswrapper[4612]: I1203 07:46:14.777659 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqv6x\" (UniqueName: \"kubernetes.io/projected/6b63827f-76e8-454f-9243-6c05f9e3c2fd-kube-api-access-sqv6x\") pod \"openstackclient\" (UID: \"6b63827f-76e8-454f-9243-6c05f9e3c2fd\") " pod="openstack/openstackclient" Dec 03 07:46:14 
crc kubenswrapper[4612]: I1203 07:46:14.942666 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.101188 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a7ad504-0ba1-4a87-9197-7c7e6d934273" path="/var/lib/kubelet/pods/8a7ad504-0ba1-4a87-9197-7c7e6d934273/volumes" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.424290 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.468982 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.469054 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.469097 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppzdv\" (UniqueName: \"kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.469127 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.469277 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.469323 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts\") pod \"ed3f4a53-f059-46df-999b-53a3e0ae385b\" (UID: \"ed3f4a53-f059-46df-999b-53a3e0ae385b\") " Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.470042 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.480462 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv" (OuterVolumeSpecName: "kube-api-access-ppzdv") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). 
InnerVolumeSpecName "kube-api-access-ppzdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.494326 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.495305 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts" (OuterVolumeSpecName: "scripts") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.539988 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.541916 4612 generic.go:334] "Generic (PLEG): container finished" podID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerID="d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823" exitCode=0 Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.542908 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.543457 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerDied","Data":"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823"} Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.543496 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ed3f4a53-f059-46df-999b-53a3e0ae385b","Type":"ContainerDied","Data":"fe6cc12b293e0a5360963f3a80fa80dc0ff28e0b5014ece131847ad87368e6bc"} Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.543519 4612 scope.go:117] "RemoveContainer" containerID="2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.582011 4612 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed3f4a53-f059-46df-999b-53a3e0ae385b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.583660 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.583687 4612 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.583698 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppzdv\" (UniqueName: \"kubernetes.io/projected/ed3f4a53-f059-46df-999b-53a3e0ae385b-kube-api-access-ppzdv\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.597729 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.677447 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data" (OuterVolumeSpecName: "config-data") pod "ed3f4a53-f059-46df-999b-53a3e0ae385b" (UID: "ed3f4a53-f059-46df-999b-53a3e0ae385b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.688033 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.688061 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3f4a53-f059-46df-999b-53a3e0ae385b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.729057 4612 scope.go:117] "RemoveContainer" containerID="d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.749163 4612 scope.go:117] "RemoveContainer" containerID="2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb" Dec 03 07:46:15 crc kubenswrapper[4612]: E1203 07:46:15.749665 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb\": container with ID starting with 2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb not found: ID does not exist" containerID="2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.749708 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb"} err="failed to get container status \"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb\": rpc error: code = NotFound desc = could not find container \"2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb\": container with ID starting with 2ee71cc69b7ef059e1ef3059a1d679c0fe4cb1e5725074542c3a0fbd16187feb not found: ID does not exist" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.749739 4612 scope.go:117] "RemoveContainer" containerID="d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823" Dec 03 07:46:15 crc kubenswrapper[4612]: E1203 07:46:15.750079 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823\": container with ID starting with d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823 not found: ID does not exist" containerID="d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.750120 4612 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823"} err="failed to get container status \"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823\": rpc error: code = NotFound desc = could not find container \"d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823\": container with ID starting with d3dce5298c8944d9ae593489fdb7e9a915fb059ada43ec0f222a9e183080b823 not found: ID does not exist" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.881086 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.887475 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.906714 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:15 crc kubenswrapper[4612]: E1203 07:46:15.907571 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="probe" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.907590 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="probe" Dec 03 07:46:15 crc kubenswrapper[4612]: E1203 07:46:15.907604 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="cinder-scheduler" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.907611 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="cinder-scheduler" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.907975 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="probe" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.908013 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" containerName="cinder-scheduler" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.909532 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.925493 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.928422 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999363 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6r56\" (UniqueName: \"kubernetes.io/projected/df089922-13b4-43d5-beaf-8dff66c6e7cb-kube-api-access-v6r56\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999458 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df089922-13b4-43d5-beaf-8dff66c6e7cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999492 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999518 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999615 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:15 crc kubenswrapper[4612]: I1203 07:46:15.999695 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101512 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6r56\" (UniqueName: \"kubernetes.io/projected/df089922-13b4-43d5-beaf-8dff66c6e7cb-kube-api-access-v6r56\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101598 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df089922-13b4-43d5-beaf-8dff66c6e7cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101636 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101665 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101733 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.101838 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.102077 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df089922-13b4-43d5-beaf-8dff66c6e7cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.106573 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.106729 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.108576 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.109187 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df089922-13b4-43d5-beaf-8dff66c6e7cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.125918 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6r56\" (UniqueName: \"kubernetes.io/projected/df089922-13b4-43d5-beaf-8dff66c6e7cb-kube-api-access-v6r56\") pod \"cinder-scheduler-0\" (UID: \"df089922-13b4-43d5-beaf-8dff66c6e7cb\") " 
pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.234175 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.579365 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b63827f-76e8-454f-9243-6c05f9e3c2fd","Type":"ContainerStarted","Data":"c4406e011a49571e83d44d6640bd26c759a9a73fbb51d86c0af42417d354576c"} Dec 03 07:46:16 crc kubenswrapper[4612]: I1203 07:46:16.823196 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 07:46:16 crc kubenswrapper[4612]: W1203 07:46:16.827199 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf089922_13b4_43d5_beaf_8dff66c6e7cb.slice/crio-07a22bc95b952d6cc648786a73fe86777cf6b7917a27272a7502e051ece656f3 WatchSource:0}: Error finding container 07a22bc95b952d6cc648786a73fe86777cf6b7917a27272a7502e051ece656f3: Status 404 returned error can't find the container with id 07a22bc95b952d6cc648786a73fe86777cf6b7917a27272a7502e051ece656f3 Dec 03 07:46:17 crc kubenswrapper[4612]: I1203 07:46:17.111141 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed3f4a53-f059-46df-999b-53a3e0ae385b" path="/var/lib/kubelet/pods/ed3f4a53-f059-46df-999b-53a3e0ae385b/volumes" Dec 03 07:46:17 crc kubenswrapper[4612]: I1203 07:46:17.136832 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:46:17 crc kubenswrapper[4612]: I1203 07:46:17.136912 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:46:17 crc kubenswrapper[4612]: I1203 07:46:17.660148 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df089922-13b4-43d5-beaf-8dff66c6e7cb","Type":"ContainerStarted","Data":"07a22bc95b952d6cc648786a73fe86777cf6b7917a27272a7502e051ece656f3"} Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.670216 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df089922-13b4-43d5-beaf-8dff66c6e7cb","Type":"ContainerStarted","Data":"443f0f4998013034eca8052adc5ed9f74b6337c812dff557c885dd6d749185eb"} Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.670527 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"df089922-13b4-43d5-beaf-8dff66c6e7cb","Type":"ContainerStarted","Data":"341e5dccb4e2a914db19cf4485f511b07a4c33c4c1a0c3488c984328e52a1605"} Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.694190 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.694168977 podStartE2EDuration="3.694168977s" podCreationTimestamp="2025-12-03 07:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 
07:46:18.688304981 +0000 UTC m=+1141.861662381" watchObservedRunningTime="2025-12-03 07:46:18.694168977 +0000 UTC m=+1141.867526377" Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.930830 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.932257 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-central-agent" containerID="cri-o://cf62adc9b2e9b0034859b41bece66812579f148487e9c68316b6888c4217cacd" gracePeriod=30 Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.932301 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="proxy-httpd" containerID="cri-o://74f8e78493cd009de97ef69e3650a494f6b07b27b81cef8b588f3b84ce06ec6f" gracePeriod=30 Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.932333 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-notification-agent" containerID="cri-o://264f8814be5076dec578447aeb666a338405ee5d52a94879ef4636da6198ec20" gracePeriod=30 Dec 03 07:46:18 crc kubenswrapper[4612]: I1203 07:46:18.932285 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="sg-core" containerID="cri-o://23e6de29c7fb5a16a4445db935ed2bc8c94e0e529ca22f31bb6cfd5b6997c820" gracePeriod=30 Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.741559 4612 generic.go:334] "Generic (PLEG): container finished" podID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerID="74f8e78493cd009de97ef69e3650a494f6b07b27b81cef8b588f3b84ce06ec6f" exitCode=0 Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.741852 4612 generic.go:334] "Generic (PLEG): container finished" podID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerID="23e6de29c7fb5a16a4445db935ed2bc8c94e0e529ca22f31bb6cfd5b6997c820" exitCode=2 Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.741860 4612 generic.go:334] "Generic (PLEG): container finished" podID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerID="264f8814be5076dec578447aeb666a338405ee5d52a94879ef4636da6198ec20" exitCode=0 Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.741867 4612 generic.go:334] "Generic (PLEG): container finished" podID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerID="cf62adc9b2e9b0034859b41bece66812579f148487e9c68316b6888c4217cacd" exitCode=0 Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.742691 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerDied","Data":"74f8e78493cd009de97ef69e3650a494f6b07b27b81cef8b588f3b84ce06ec6f"} Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.742717 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerDied","Data":"23e6de29c7fb5a16a4445db935ed2bc8c94e0e529ca22f31bb6cfd5b6997c820"} Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.742727 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerDied","Data":"264f8814be5076dec578447aeb666a338405ee5d52a94879ef4636da6198ec20"} Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.742735 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerDied","Data":"cf62adc9b2e9b0034859b41bece66812579f148487e9c68316b6888c4217cacd"} Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.854053 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889166 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889320 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889360 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889385 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889405 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889483 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc9p4\" (UniqueName: \"kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889539 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data\") pod \"b3d66811-da8a-485c-9dd8-092f29388b2a\" (UID: \"b3d66811-da8a-485c-9dd8-092f29388b2a\") " Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.889764 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.890487 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.891103 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.903679 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4" (OuterVolumeSpecName: "kube-api-access-gc9p4") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "kube-api-access-gc9p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.903777 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts" (OuterVolumeSpecName: "scripts") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.951208 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.991863 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.991892 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.991903 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc9p4\" (UniqueName: \"kubernetes.io/projected/b3d66811-da8a-485c-9dd8-092f29388b2a-kube-api-access-gc9p4\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.991912 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3d66811-da8a-485c-9dd8-092f29388b2a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:19 crc kubenswrapper[4612]: I1203 07:46:19.995962 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data" (OuterVolumeSpecName: "config-data") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.042055 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3d66811-da8a-485c-9dd8-092f29388b2a" (UID: "b3d66811-da8a-485c-9dd8-092f29388b2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.093856 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.094241 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3d66811-da8a-485c-9dd8-092f29388b2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.380303 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7b57f75fd5-642lv"] Dec 03 07:46:20 crc kubenswrapper[4612]: E1203 07:46:20.380900 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-notification-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.380978 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-notification-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: E1203 07:46:20.381056 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="proxy-httpd" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381115 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="proxy-httpd" Dec 03 07:46:20 crc kubenswrapper[4612]: E1203 07:46:20.381181 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="sg-core" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381230 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="sg-core" Dec 03 07:46:20 crc kubenswrapper[4612]: E1203 07:46:20.381284 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-central-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381333 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-central-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381546 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="sg-core" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381612 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-central-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381684 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="ceilometer-notification-agent" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.381740 4612 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" containerName="proxy-httpd" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.382727 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.388176 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.388391 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.388527 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.457187 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7b57f75fd5-642lv"] Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-log-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500371 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-internal-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500419 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgncl\" (UniqueName: \"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-kube-api-access-fgncl\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500441 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-run-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500459 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-combined-ca-bundle\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500478 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-public-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500498 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-config-data\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.500516 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-etc-swift\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.601892 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-log-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602035 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-internal-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602094 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgncl\" (UniqueName: \"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-kube-api-access-fgncl\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602121 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-run-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602140 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-combined-ca-bundle\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602158 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-public-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602175 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-config-data\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.602191 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-etc-swift\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.603492 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-log-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.603902 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2528552f-220d-4b33-990a-7793d5d8987a-run-httpd\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.607796 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-etc-swift\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.610206 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-public-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.610449 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-internal-tls-certs\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.611914 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-config-data\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.617898 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2528552f-220d-4b33-990a-7793d5d8987a-combined-ca-bundle\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.643740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgncl\" (UniqueName: \"kubernetes.io/projected/2528552f-220d-4b33-990a-7793d5d8987a-kube-api-access-fgncl\") pod \"swift-proxy-7b57f75fd5-642lv\" (UID: \"2528552f-220d-4b33-990a-7793d5d8987a\") " pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.705523 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.772394 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3d66811-da8a-485c-9dd8-092f29388b2a","Type":"ContainerDied","Data":"0d497bb8ce0a45b0af6a30526b71f6af35b5fdcef51c4162e322a6dce5c54aa4"} Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.772465 4612 scope.go:117] "RemoveContainer" containerID="74f8e78493cd009de97ef69e3650a494f6b07b27b81cef8b588f3b84ce06ec6f" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.772495 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.869250 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.878632 4612 scope.go:117] "RemoveContainer" containerID="23e6de29c7fb5a16a4445db935ed2bc8c94e0e529ca22f31bb6cfd5b6997c820" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.900539 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.928645 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.931497 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.935400 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.935658 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.941792 4612 scope.go:117] "RemoveContainer" containerID="264f8814be5076dec578447aeb666a338405ee5d52a94879ef4636da6198ec20" Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.942262 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:20 crc kubenswrapper[4612]: I1203 07:46:20.984287 4612 scope.go:117] "RemoveContainer" containerID="cf62adc9b2e9b0034859b41bece66812579f148487e9c68316b6888c4217cacd" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.011740 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.011812 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.011914 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.011973 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.011994 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j6nb\" (UniqueName: \"kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.012016 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.012054 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.109738 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3d66811-da8a-485c-9dd8-092f29388b2a" path="/var/lib/kubelet/pods/b3d66811-da8a-485c-9dd8-092f29388b2a/volumes" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114211 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114270 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j6nb\" (UniqueName: \"kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114301 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114373 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114461 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114489 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.114609 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.115565 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.116637 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.123488 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.124058 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.124608 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.125318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.138990 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j6nb\" (UniqueName: \"kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb\") pod \"ceilometer-0\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.236536 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.274722 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.456209 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7b57f75fd5-642lv"] Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.796448 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7b57f75fd5-642lv" event={"ID":"2528552f-220d-4b33-990a-7793d5d8987a","Type":"ContainerStarted","Data":"c07b0e7fc1d2c0197e11d95f9e394d293a0ac2ec8baab282ece4f4c83b54319b"} Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.796650 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7b57f75fd5-642lv" event={"ID":"2528552f-220d-4b33-990a-7793d5d8987a","Type":"ContainerStarted","Data":"4c21321fa7d24176d3090fb07d06c2ff47cea1aee455bfed8f9b13d927b3cd26"} Dec 03 07:46:21 crc kubenswrapper[4612]: I1203 07:46:21.820395 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.810220 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7b57f75fd5-642lv" event={"ID":"2528552f-220d-4b33-990a-7793d5d8987a","Type":"ContainerStarted","Data":"3f8e58962df08e6438d1379577d4e18ceb3d358ec131e186c5d9df7517358cf3"} Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.810891 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.810927 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.813199 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerStarted","Data":"c1238e0e10df990d51a5392e1ac8cc525b17caae14e5a022cd268c25852865a5"} Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.813229 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerStarted","Data":"0a1a17a5c5b80643d25a985675d542a66d97c311f15c58c65cd6e0829e15d45e"} Dec 03 07:46:22 crc kubenswrapper[4612]: I1203 07:46:22.842784 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7b57f75fd5-642lv" podStartSLOduration=2.8427668280000002 podStartE2EDuration="2.842766828s" podCreationTimestamp="2025-12-03 07:46:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:22.825740904 +0000 UTC m=+1145.999098314" watchObservedRunningTime="2025-12-03 07:46:22.842766828 +0000 UTC m=+1146.016124228" Dec 03 07:46:23 crc kubenswrapper[4612]: I1203 07:46:23.839625 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerStarted","Data":"c94b9407bcaedcc29ef406c2261b439c451ecdbfc29c35428a5715365fc13fd0"} Dec 03 07:46:23 crc kubenswrapper[4612]: I1203 07:46:23.881003 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 07:46:26 crc kubenswrapper[4612]: I1203 07:46:26.606380 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 07:46:29 crc kubenswrapper[4612]: I1203 07:46:29.396490 4612 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:46:30 crc kubenswrapper[4612]: I1203 07:46:30.716467 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:30 crc kubenswrapper[4612]: I1203 07:46:30.718241 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7b57f75fd5-642lv" Dec 03 07:46:30 crc kubenswrapper[4612]: I1203 07:46:30.930731 4612 generic.go:334] "Generic (PLEG): container finished" podID="7462fb55-15b7-4416-b34f-23893766b5ed" containerID="4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01" exitCode=137 Dec 03 07:46:30 crc kubenswrapper[4612]: I1203 07:46:30.930812 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerDied","Data":"4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01"} Dec 03 07:46:31 crc kubenswrapper[4612]: E1203 07:46:31.242533 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29d52104_a465_4ca0_a040_d9dba9e47600.slice/crio-59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7462fb55_15b7_4416_b34f_23893766b5ed.slice/crio-4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29d52104_a465_4ca0_a040_d9dba9e47600.slice/crio-conmon-59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7462fb55_15b7_4416_b34f_23893766b5ed.slice/crio-conmon-4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:46:31 crc kubenswrapper[4612]: I1203 07:46:31.945921 4612 generic.go:334] "Generic (PLEG): container finished" podID="29d52104-a465-4ca0-a040-d9dba9e47600" containerID="59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74" exitCode=137 Dec 03 07:46:31 crc kubenswrapper[4612]: I1203 07:46:31.946005 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55fc5c6c94-pjh5s" event={"ID":"29d52104-a465-4ca0-a040-d9dba9e47600","Type":"ContainerDied","Data":"59d1c0b708def9bcaa0ff30604b6f69b90dc88a40d4c4565b1e6c09b0bb13d74"} Dec 03 07:46:32 crc kubenswrapper[4612]: I1203 07:46:32.955050 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b63827f-76e8-454f-9243-6c05f9e3c2fd","Type":"ContainerStarted","Data":"780ae37602bba213da1003afe1baeb3c92d6e5bbabda2eb63048da8816923a82"} Dec 03 07:46:32 crc kubenswrapper[4612]: I1203 07:46:32.956902 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerStarted","Data":"1a50efea835451c97ea6190e5fa7f89735d966c21ab5ee910d8453c61e545b0c"} Dec 03 07:46:32 crc kubenswrapper[4612]: I1203 07:46:32.959272 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" 
event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerStarted","Data":"6845b3347b561dff2c7aaf840fb49d55a2770102726dd8f15710394a1f37b660"} Dec 03 07:46:32 crc kubenswrapper[4612]: I1203 07:46:32.961332 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55fc5c6c94-pjh5s" event={"ID":"29d52104-a465-4ca0-a040-d9dba9e47600","Type":"ContainerStarted","Data":"f97facba439d2f44d131bb99645bdbf50386a7e19a0365adfc55704c0d93125d"} Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.032504 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.589587781 podStartE2EDuration="19.03248621s" podCreationTimestamp="2025-12-03 07:46:14 +0000 UTC" firstStartedPulling="2025-12-03 07:46:15.547802871 +0000 UTC m=+1138.721160271" lastFinishedPulling="2025-12-03 07:46:31.9907013 +0000 UTC m=+1155.164058700" observedRunningTime="2025-12-03 07:46:32.992292698 +0000 UTC m=+1156.165650098" watchObservedRunningTime="2025-12-03 07:46:33.03248621 +0000 UTC m=+1156.205843610" Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.973838 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-central-agent" containerID="cri-o://c1238e0e10df990d51a5392e1ac8cc525b17caae14e5a022cd268c25852865a5" gracePeriod=30 Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.974385 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerStarted","Data":"6e0a33bdde4cc42f2d284c011e4f75a9b9bb2e96987f2286cb5d78e984db5bda"} Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.974855 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="proxy-httpd" containerID="cri-o://6e0a33bdde4cc42f2d284c011e4f75a9b9bb2e96987f2286cb5d78e984db5bda" gracePeriod=30 Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.974884 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.974908 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="sg-core" containerID="cri-o://1a50efea835451c97ea6190e5fa7f89735d966c21ab5ee910d8453c61e545b0c" gracePeriod=30 Dec 03 07:46:33 crc kubenswrapper[4612]: I1203 07:46:33.974958 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-notification-agent" containerID="cri-o://c94b9407bcaedcc29ef406c2261b439c451ecdbfc29c35428a5715365fc13fd0" gracePeriod=30 Dec 03 07:46:34 crc kubenswrapper[4612]: I1203 07:46:34.006922 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.460285336 podStartE2EDuration="14.006907332s" podCreationTimestamp="2025-12-03 07:46:20 +0000 UTC" firstStartedPulling="2025-12-03 07:46:21.829881747 +0000 UTC m=+1145.003239147" lastFinishedPulling="2025-12-03 07:46:33.376503743 +0000 UTC m=+1156.549861143" observedRunningTime="2025-12-03 07:46:34.005651601 +0000 UTC m=+1157.179009001" watchObservedRunningTime="2025-12-03 07:46:34.006907332 +0000 UTC m=+1157.180264732" Dec 03 
07:46:34 crc kubenswrapper[4612]: I1203 07:46:34.984779 4612 generic.go:334] "Generic (PLEG): container finished" podID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerID="1a50efea835451c97ea6190e5fa7f89735d966c21ab5ee910d8453c61e545b0c" exitCode=2 Dec 03 07:46:34 crc kubenswrapper[4612]: I1203 07:46:34.985091 4612 generic.go:334] "Generic (PLEG): container finished" podID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerID="c94b9407bcaedcc29ef406c2261b439c451ecdbfc29c35428a5715365fc13fd0" exitCode=0 Dec 03 07:46:34 crc kubenswrapper[4612]: I1203 07:46:34.984861 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerDied","Data":"1a50efea835451c97ea6190e5fa7f89735d966c21ab5ee910d8453c61e545b0c"} Dec 03 07:46:34 crc kubenswrapper[4612]: I1203 07:46:34.985132 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerDied","Data":"c94b9407bcaedcc29ef406c2261b439c451ecdbfc29c35428a5715365fc13fd0"} Dec 03 07:46:36 crc kubenswrapper[4612]: I1203 07:46:36.000194 4612 generic.go:334] "Generic (PLEG): container finished" podID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerID="c1238e0e10df990d51a5392e1ac8cc525b17caae14e5a022cd268c25852865a5" exitCode=0 Dec 03 07:46:36 crc kubenswrapper[4612]: I1203 07:46:36.000246 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerDied","Data":"c1238e0e10df990d51a5392e1ac8cc525b17caae14e5a022cd268c25852865a5"} Dec 03 07:46:39 crc kubenswrapper[4612]: I1203 07:46:39.931019 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-nnmgl"] Dec 03 07:46:39 crc kubenswrapper[4612]: I1203 07:46:39.934174 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:39 crc kubenswrapper[4612]: I1203 07:46:39.942179 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-nnmgl"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.016680 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-krjbf"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.017680 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.029920 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-krjbf"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.105292 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lssht\" (UniqueName: \"kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.105373 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvx6t\" (UniqueName: \"kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.105442 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.105491 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.137535 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-357d-account-create-update-cqp62"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.142145 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.144296 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.159063 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-357d-account-create-update-cqp62"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.206851 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.207180 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.207221 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lssht\" (UniqueName: \"kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.208004 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.208112 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.208161 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvx6t\" (UniqueName: \"kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.235529 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lssht\" (UniqueName: \"kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht\") pod \"nova-cell0-db-create-krjbf\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.238449 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvx6t\" (UniqueName: \"kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t\") pod \"nova-api-db-create-nnmgl\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc 
kubenswrapper[4612]: I1203 07:46:40.259654 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-zpc84"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.261137 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.264779 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.309478 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln868\" (UniqueName: \"kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.309535 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.309598 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27qbb\" (UniqueName: \"kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb\") pod \"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.309636 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts\") pod \"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.310786 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zpc84"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.360382 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.396064 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-2cf4-account-create-update-6p9qp"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.397473 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.402562 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.417000 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln868\" (UniqueName: \"kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.417045 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.417101 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27qbb\" (UniqueName: \"kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb\") pod \"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.417135 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts\") pod \"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.427059 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.432828 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts\") pod \"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.460604 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2cf4-account-create-update-6p9qp"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.471470 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln868\" (UniqueName: \"kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868\") pod \"nova-cell1-db-create-zpc84\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.501794 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27qbb\" (UniqueName: \"kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb\") pod 
\"nova-api-357d-account-create-update-cqp62\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.534002 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4ndg\" (UniqueName: \"kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.534062 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.546024 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-fe46-account-create-update-r7zpj"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.547606 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.550442 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.557749 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fe46-account-create-update-r7zpj"] Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.638990 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4ndg\" (UniqueName: \"kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.639238 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.639984 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.665013 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4ndg\" (UniqueName: \"kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg\") pod \"nova-cell0-2cf4-account-create-update-6p9qp\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.722592 4612 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.740367 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbfvj\" (UniqueName: \"kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.740431 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.758830 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.777482 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.778569 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.804054 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.809057 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.818019 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.845588 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbfvj\" (UniqueName: \"kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.845644 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.846337 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.884478 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbfvj\" (UniqueName: \"kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj\") pod \"nova-cell1-fe46-account-create-update-r7zpj\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:40 crc kubenswrapper[4612]: I1203 07:46:40.905030 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:41 crc kubenswrapper[4612]: I1203 07:46:41.065901 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-nnmgl"] Dec 03 07:46:41 crc kubenswrapper[4612]: I1203 07:46:41.202422 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-krjbf"] Dec 03 07:46:41 crc kubenswrapper[4612]: I1203 07:46:41.474483 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zpc84"] Dec 03 07:46:41 crc kubenswrapper[4612]: I1203 07:46:41.584578 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-357d-account-create-update-cqp62"] Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.007383 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2cf4-account-create-update-6p9qp"] Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.038061 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fe46-account-create-update-r7zpj"] Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.101423 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nnmgl" event={"ID":"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67","Type":"ContainerStarted","Data":"ff484b7a21a0b2dd360a06ca438a41e1a48881dffa515269ca506da4ca4329f7"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.101458 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nnmgl" event={"ID":"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67","Type":"ContainerStarted","Data":"395d45d5646c68eda3c903bcfb828f689ca9a80f2bfe0d9a855fa2dcf3c1cb6e"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.109074 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" event={"ID":"82be14dd-0210-4d88-8dbe-c4ca490399aa","Type":"ContainerStarted","Data":"e751be01c71de4867fd10cdbb4d09b81b60b11be0fc6afbca312a3c54eef4ff5"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.115250 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zpc84" event={"ID":"4b0e8baf-18db-482a-994e-6ccd87671fee","Type":"ContainerStarted","Data":"53cb50233be2fea52ede3bd8d08c88fe03950800a47a25ee59d5bace548c570d"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.124497 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-357d-account-create-update-cqp62" event={"ID":"75c9c6ea-f164-4f1d-b907-681771c086e5","Type":"ContainerStarted","Data":"fdf9b31797c050d16e9f44553c77c7cd3c8037b4c9109647627b1447433c2244"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.124548 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-357d-account-create-update-cqp62" event={"ID":"75c9c6ea-f164-4f1d-b907-681771c086e5","Type":"ContainerStarted","Data":"e72e0af86784bd29853707c716b6506d338ac617bce44d5ecfe5c818de6cf54d"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.131978 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-krjbf" event={"ID":"f5a6e38a-a03f-4803-82e6-1acdd1b843ed","Type":"ContainerStarted","Data":"0ebd2f457a335251fb4347a02e1cc00b8de61f655d1f4357358fdfcbf9820a17"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.132010 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-krjbf" 
event={"ID":"f5a6e38a-a03f-4803-82e6-1acdd1b843ed","Type":"ContainerStarted","Data":"e54f91f528790fe764a488287c8509cef1ad6e068ec5fef437f608b0293e7fe9"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.139237 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" event={"ID":"dc2359b8-eb58-403b-a848-bf3d750015bf","Type":"ContainerStarted","Data":"412338f2ddaeb4c3f4fb1da74f65201bed53d2b73a0eeee0fe4cee0dcf52cd08"} Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.140110 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-nnmgl" podStartSLOduration=3.140085077 podStartE2EDuration="3.140085077s" podCreationTimestamp="2025-12-03 07:46:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:42.126523019 +0000 UTC m=+1165.299880419" watchObservedRunningTime="2025-12-03 07:46:42.140085077 +0000 UTC m=+1165.313442487" Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.174727 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-357d-account-create-update-cqp62" podStartSLOduration=2.17470747 podStartE2EDuration="2.17470747s" podCreationTimestamp="2025-12-03 07:46:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:42.147239615 +0000 UTC m=+1165.320597025" watchObservedRunningTime="2025-12-03 07:46:42.17470747 +0000 UTC m=+1165.348064880" Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.222761 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-krjbf" podStartSLOduration=3.222736247 podStartE2EDuration="3.222736247s" podCreationTimestamp="2025-12-03 07:46:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:42.185032257 +0000 UTC m=+1165.358389657" watchObservedRunningTime="2025-12-03 07:46:42.222736247 +0000 UTC m=+1165.396093657" Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.434120 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.434347 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-log" containerID="cri-o://c797e95863d479c36f0667bacdc48947aee38f56fe99fa0ad04b4265532b5cb5" gracePeriod=30 Dec 03 07:46:42 crc kubenswrapper[4612]: I1203 07:46:42.434769 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-httpd" containerID="cri-o://c7fbfa80703b9a78a5c894b5a8f0fd108323def1d604f1ca94cc6327dd27d0b1" gracePeriod=30 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.148454 4612 generic.go:334] "Generic (PLEG): container finished" podID="f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" containerID="ff484b7a21a0b2dd360a06ca438a41e1a48881dffa515269ca506da4ca4329f7" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.148516 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nnmgl" 
event={"ID":"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67","Type":"ContainerDied","Data":"ff484b7a21a0b2dd360a06ca438a41e1a48881dffa515269ca506da4ca4329f7"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.150325 4612 generic.go:334] "Generic (PLEG): container finished" podID="4b0e8baf-18db-482a-994e-6ccd87671fee" containerID="4628bbeab0dbb5bf38ec46ef91ce70fc5da54f1015a5a3bb5105d8b7686819c4" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.150378 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zpc84" event={"ID":"4b0e8baf-18db-482a-994e-6ccd87671fee","Type":"ContainerDied","Data":"4628bbeab0dbb5bf38ec46ef91ce70fc5da54f1015a5a3bb5105d8b7686819c4"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.152511 4612 generic.go:334] "Generic (PLEG): container finished" podID="00a52936-281e-497d-a9c4-9216dcc33120" containerID="c797e95863d479c36f0667bacdc48947aee38f56fe99fa0ad04b4265532b5cb5" exitCode=143 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.152634 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerDied","Data":"c797e95863d479c36f0667bacdc48947aee38f56fe99fa0ad04b4265532b5cb5"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.154544 4612 generic.go:334] "Generic (PLEG): container finished" podID="75c9c6ea-f164-4f1d-b907-681771c086e5" containerID="fdf9b31797c050d16e9f44553c77c7cd3c8037b4c9109647627b1447433c2244" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.154572 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-357d-account-create-update-cqp62" event={"ID":"75c9c6ea-f164-4f1d-b907-681771c086e5","Type":"ContainerDied","Data":"fdf9b31797c050d16e9f44553c77c7cd3c8037b4c9109647627b1447433c2244"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.156260 4612 generic.go:334] "Generic (PLEG): container finished" podID="dc2359b8-eb58-403b-a848-bf3d750015bf" containerID="710ce6f0c20285af1557680764bb5701049012bf9c1868315e7d23b1b1640833" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.156394 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" event={"ID":"dc2359b8-eb58-403b-a848-bf3d750015bf","Type":"ContainerDied","Data":"710ce6f0c20285af1557680764bb5701049012bf9c1868315e7d23b1b1640833"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.161425 4612 generic.go:334] "Generic (PLEG): container finished" podID="82be14dd-0210-4d88-8dbe-c4ca490399aa" containerID="2496b60700d18c3720d14fdc2988a4d0b0bf8680a9d3c258762bc17077315378" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.161530 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" event={"ID":"82be14dd-0210-4d88-8dbe-c4ca490399aa","Type":"ContainerDied","Data":"2496b60700d18c3720d14fdc2988a4d0b0bf8680a9d3c258762bc17077315378"} Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.163494 4612 generic.go:334] "Generic (PLEG): container finished" podID="f5a6e38a-a03f-4803-82e6-1acdd1b843ed" containerID="0ebd2f457a335251fb4347a02e1cc00b8de61f655d1f4357358fdfcbf9820a17" exitCode=0 Dec 03 07:46:43 crc kubenswrapper[4612]: I1203 07:46:43.163531 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-krjbf" 
event={"ID":"f5a6e38a-a03f-4803-82e6-1acdd1b843ed","Type":"ContainerDied","Data":"0ebd2f457a335251fb4347a02e1cc00b8de61f655d1f4357358fdfcbf9820a17"} Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.668409 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.838364 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvx6t\" (UniqueName: \"kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t\") pod \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.839335 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts\") pod \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\" (UID: \"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67\") " Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.840340 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" (UID: "f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.853150 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t" (OuterVolumeSpecName: "kube-api-access-rvx6t") pod "f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" (UID: "f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67"). InnerVolumeSpecName "kube-api-access-rvx6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.944580 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvx6t\" (UniqueName: \"kubernetes.io/projected/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-kube-api-access-rvx6t\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.944614 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:44 crc kubenswrapper[4612]: I1203 07:46:44.982101 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.114234 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.151625 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts\") pod \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.151733 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lssht\" (UniqueName: \"kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht\") pod \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\" (UID: \"f5a6e38a-a03f-4803-82e6-1acdd1b843ed\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.152440 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.154679 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5a6e38a-a03f-4803-82e6-1acdd1b843ed" (UID: "f5a6e38a-a03f-4803-82e6-1acdd1b843ed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.177982 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.180518 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht" (OuterVolumeSpecName: "kube-api-access-lssht") pod "f5a6e38a-a03f-4803-82e6-1acdd1b843ed" (UID: "f5a6e38a-a03f-4803-82e6-1acdd1b843ed"). InnerVolumeSpecName "kube-api-access-lssht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.232235 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-357d-account-create-update-cqp62" event={"ID":"75c9c6ea-f164-4f1d-b907-681771c086e5","Type":"ContainerDied","Data":"e72e0af86784bd29853707c716b6506d338ac617bce44d5ecfe5c818de6cf54d"} Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.232284 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e72e0af86784bd29853707c716b6506d338ac617bce44d5ecfe5c818de6cf54d" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.232432 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-357d-account-create-update-cqp62" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.234682 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-krjbf" event={"ID":"f5a6e38a-a03f-4803-82e6-1acdd1b843ed","Type":"ContainerDied","Data":"e54f91f528790fe764a488287c8509cef1ad6e068ec5fef437f608b0293e7fe9"} Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.234707 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e54f91f528790fe764a488287c8509cef1ad6e068ec5fef437f608b0293e7fe9" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.234761 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-krjbf" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.236374 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" event={"ID":"dc2359b8-eb58-403b-a848-bf3d750015bf","Type":"ContainerDied","Data":"412338f2ddaeb4c3f4fb1da74f65201bed53d2b73a0eeee0fe4cee0dcf52cd08"} Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.236395 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="412338f2ddaeb4c3f4fb1da74f65201bed53d2b73a0eeee0fe4cee0dcf52cd08" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.236439 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2cf4-account-create-update-6p9qp" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.238382 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" event={"ID":"82be14dd-0210-4d88-8dbe-c4ca490399aa","Type":"ContainerDied","Data":"e751be01c71de4867fd10cdbb4d09b81b60b11be0fc6afbca312a3c54eef4ff5"} Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.238404 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e751be01c71de4867fd10cdbb4d09b81b60b11be0fc6afbca312a3c54eef4ff5" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.238450 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fe46-account-create-update-r7zpj" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.248174 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-nnmgl" event={"ID":"f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67","Type":"ContainerDied","Data":"395d45d5646c68eda3c903bcfb828f689ca9a80f2bfe0d9a855fa2dcf3c1cb6e"} Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.248212 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="395d45d5646c68eda3c903bcfb828f689ca9a80f2bfe0d9a855fa2dcf3c1cb6e" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.248348 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-nnmgl" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256644 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4ndg\" (UniqueName: \"kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg\") pod \"dc2359b8-eb58-403b-a848-bf3d750015bf\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256698 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts\") pod \"dc2359b8-eb58-403b-a848-bf3d750015bf\" (UID: \"dc2359b8-eb58-403b-a848-bf3d750015bf\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256757 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts\") pod \"75c9c6ea-f164-4f1d-b907-681771c086e5\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256866 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts\") pod \"82be14dd-0210-4d88-8dbe-c4ca490399aa\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256906 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbfvj\" (UniqueName: \"kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj\") pod \"82be14dd-0210-4d88-8dbe-c4ca490399aa\" (UID: \"82be14dd-0210-4d88-8dbe-c4ca490399aa\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.256929 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27qbb\" (UniqueName: \"kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb\") pod \"75c9c6ea-f164-4f1d-b907-681771c086e5\" (UID: \"75c9c6ea-f164-4f1d-b907-681771c086e5\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.257557 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.257578 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lssht\" (UniqueName: \"kubernetes.io/projected/f5a6e38a-a03f-4803-82e6-1acdd1b843ed-kube-api-access-lssht\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.258911 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75c9c6ea-f164-4f1d-b907-681771c086e5" (UID: "75c9c6ea-f164-4f1d-b907-681771c086e5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.260416 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc2359b8-eb58-403b-a848-bf3d750015bf" (UID: "dc2359b8-eb58-403b-a848-bf3d750015bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.260836 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "82be14dd-0210-4d88-8dbe-c4ca490399aa" (UID: "82be14dd-0210-4d88-8dbe-c4ca490399aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.264722 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg" (OuterVolumeSpecName: "kube-api-access-d4ndg") pod "dc2359b8-eb58-403b-a848-bf3d750015bf" (UID: "dc2359b8-eb58-403b-a848-bf3d750015bf"). InnerVolumeSpecName "kube-api-access-d4ndg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.268560 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb" (OuterVolumeSpecName: "kube-api-access-27qbb") pod "75c9c6ea-f164-4f1d-b907-681771c086e5" (UID: "75c9c6ea-f164-4f1d-b907-681771c086e5"). InnerVolumeSpecName "kube-api-access-27qbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.269358 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj" (OuterVolumeSpecName: "kube-api-access-dbfvj") pod "82be14dd-0210-4d88-8dbe-c4ca490399aa" (UID: "82be14dd-0210-4d88-8dbe-c4ca490399aa"). InnerVolumeSpecName "kube-api-access-dbfvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.272144 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.361653 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts\") pod \"4b0e8baf-18db-482a-994e-6ccd87671fee\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.361716 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln868\" (UniqueName: \"kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868\") pod \"4b0e8baf-18db-482a-994e-6ccd87671fee\" (UID: \"4b0e8baf-18db-482a-994e-6ccd87671fee\") " Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362071 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4b0e8baf-18db-482a-994e-6ccd87671fee" (UID: "4b0e8baf-18db-482a-994e-6ccd87671fee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362323 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82be14dd-0210-4d88-8dbe-c4ca490399aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362345 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbfvj\" (UniqueName: \"kubernetes.io/projected/82be14dd-0210-4d88-8dbe-c4ca490399aa-kube-api-access-dbfvj\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362356 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27qbb\" (UniqueName: \"kubernetes.io/projected/75c9c6ea-f164-4f1d-b907-681771c086e5-kube-api-access-27qbb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362364 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b0e8baf-18db-482a-994e-6ccd87671fee-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362374 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4ndg\" (UniqueName: \"kubernetes.io/projected/dc2359b8-eb58-403b-a848-bf3d750015bf-kube-api-access-d4ndg\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362382 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2359b8-eb58-403b-a848-bf3d750015bf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.362390 4612 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75c9c6ea-f164-4f1d-b907-681771c086e5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.366197 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868" (OuterVolumeSpecName: "kube-api-access-ln868") pod "4b0e8baf-18db-482a-994e-6ccd87671fee" (UID: "4b0e8baf-18db-482a-994e-6ccd87671fee"). 
InnerVolumeSpecName "kube-api-access-ln868". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:45 crc kubenswrapper[4612]: I1203 07:46:45.463675 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln868\" (UniqueName: \"kubernetes.io/projected/4b0e8baf-18db-482a-994e-6ccd87671fee-kube-api-access-ln868\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.280205 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zpc84" event={"ID":"4b0e8baf-18db-482a-994e-6ccd87671fee","Type":"ContainerDied","Data":"53cb50233be2fea52ede3bd8d08c88fe03950800a47a25ee59d5bace548c570d"} Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.280245 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53cb50233be2fea52ede3bd8d08c88fe03950800a47a25ee59d5bace548c570d" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.280311 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zpc84" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.288329 4612 generic.go:334] "Generic (PLEG): container finished" podID="00a52936-281e-497d-a9c4-9216dcc33120" containerID="c7fbfa80703b9a78a5c894b5a8f0fd108323def1d604f1ca94cc6327dd27d0b1" exitCode=0 Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.288377 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerDied","Data":"c7fbfa80703b9a78a5c894b5a8f0fd108323def1d604f1ca94cc6327dd27d0b1"} Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.453088 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.585794 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.585866 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.585899 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.585920 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.585969 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.586007 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.586034 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.586094 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcdfb\" (UniqueName: \"kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb\") pod \"00a52936-281e-497d-a9c4-9216dcc33120\" (UID: \"00a52936-281e-497d-a9c4-9216dcc33120\") " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.586430 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs" (OuterVolumeSpecName: "logs") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.586827 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.587160 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.587184 4612 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/00a52936-281e-497d-a9c4-9216dcc33120-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.596326 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts" (OuterVolumeSpecName: "scripts") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.596751 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb" (OuterVolumeSpecName: "kube-api-access-bcdfb") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "kube-api-access-bcdfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.616237 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.654015 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.692361 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.692413 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.692431 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.692443 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcdfb\" (UniqueName: \"kubernetes.io/projected/00a52936-281e-497d-a9c4-9216dcc33120-kube-api-access-bcdfb\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.697199 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data" (OuterVolumeSpecName: "config-data") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.720250 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.720696 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "00a52936-281e-497d-a9c4-9216dcc33120" (UID: "00a52936-281e-497d-a9c4-9216dcc33120"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.796097 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.796136 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:46 crc kubenswrapper[4612]: I1203 07:46:46.796145 4612 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00a52936-281e-497d-a9c4-9216dcc33120-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.136190 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.136665 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.136785 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.137533 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.137662 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf" gracePeriod=600 Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.297157 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"00a52936-281e-497d-a9c4-9216dcc33120","Type":"ContainerDied","Data":"aed0a2eb71730ac7a483066e588f1af70756446d0c35418691b93e1de1dbfb48"} Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.297202 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.297222 4612 scope.go:117] "RemoveContainer" containerID="c7fbfa80703b9a78a5c894b5a8f0fd108323def1d604f1ca94cc6327dd27d0b1" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.299221 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf" exitCode=0 Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.299262 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf"} Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.327172 4612 scope.go:117] "RemoveContainer" containerID="c797e95863d479c36f0667bacdc48947aee38f56fe99fa0ad04b4265532b5cb5" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.328779 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.343313 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362285 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362623 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-httpd" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362637 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-httpd" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362648 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-log" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362654 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-log" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362672 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2359b8-eb58-403b-a848-bf3d750015bf" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362678 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2359b8-eb58-403b-a848-bf3d750015bf" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362699 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82be14dd-0210-4d88-8dbe-c4ca490399aa" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362705 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="82be14dd-0210-4d88-8dbe-c4ca490399aa" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362715 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a6e38a-a03f-4803-82e6-1acdd1b843ed" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362720 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a6e38a-a03f-4803-82e6-1acdd1b843ed" containerName="mariadb-database-create" 
Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362733 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362738 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362752 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75c9c6ea-f164-4f1d-b907-681771c086e5" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362757 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="75c9c6ea-f164-4f1d-b907-681771c086e5" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: E1203 07:46:47.362768 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b0e8baf-18db-482a-994e-6ccd87671fee" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362774 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b0e8baf-18db-482a-994e-6ccd87671fee" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.362930 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b0e8baf-18db-482a-994e-6ccd87671fee" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.369861 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.369959 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-log" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.369974 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="75c9c6ea-f164-4f1d-b907-681771c086e5" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.369985 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2359b8-eb58-403b-a848-bf3d750015bf" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.369996 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="82be14dd-0210-4d88-8dbe-c4ca490399aa" containerName="mariadb-account-create-update" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.370013 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a52936-281e-497d-a9c4-9216dcc33120" containerName="glance-httpd" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.370020 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a6e38a-a03f-4803-82e6-1acdd1b843ed" containerName="mariadb-database-create" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.371223 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.374728 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.375538 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.379314 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.430889 4612 scope.go:117] "RemoveContainer" containerID="253ee07d7cd8000b3306a0841b31f595ab9d1ea323b7f796e6790764b3205b1e" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.509030 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjmbf\" (UniqueName: \"kubernetes.io/projected/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-kube-api-access-mjmbf\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.509543 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.509678 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.509801 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.509918 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.510072 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.510371 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.510516 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-logs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.641676 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642223 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642398 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-logs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642538 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjmbf\" (UniqueName: \"kubernetes.io/projected/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-kube-api-access-mjmbf\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642690 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642806 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.642970 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.645046 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" 
Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.643829 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.644112 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.643548 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-logs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.658475 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.660022 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.662039 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.675581 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.691318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjmbf\" (UniqueName: \"kubernetes.io/projected/f68a55b9-312b-42b5-a0ac-ffe92a4e81b8-kube-api-access-mjmbf\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:47 crc kubenswrapper[4612]: I1203 07:46:47.722773 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8\") " pod="openstack/glance-default-internal-api-0" Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.003677 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.316916 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14"} Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.609693 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.804016 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.804551 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-log" containerID="cri-o://e0badb7ce14b685f090002a5b47794913e81e794ec5d65f79e999e088a621874" gracePeriod=30 Dec 03 07:46:48 crc kubenswrapper[4612]: I1203 07:46:48.804622 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-httpd" containerID="cri-o://f6d236d170ce6dec577c0f5c323df170cd9f609ae91ddf240e73b257852e47c3" gracePeriod=30 Dec 03 07:46:49 crc kubenswrapper[4612]: I1203 07:46:49.101143 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a52936-281e-497d-a9c4-9216dcc33120" path="/var/lib/kubelet/pods/00a52936-281e-497d-a9c4-9216dcc33120/volumes" Dec 03 07:46:49 crc kubenswrapper[4612]: I1203 07:46:49.340225 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8","Type":"ContainerStarted","Data":"3dbfcc52faa37be2ba6a70b5b820a320e31a0327771b615b1bffdc7bf4aa7616"} Dec 03 07:46:49 crc kubenswrapper[4612]: I1203 07:46:49.340518 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8","Type":"ContainerStarted","Data":"24d2c1c46b9ced56b40715b78fed5d6958b4389ad9659fca400e616c595d8836"} Dec 03 07:46:49 crc kubenswrapper[4612]: I1203 07:46:49.343282 4612 generic.go:334] "Generic (PLEG): container finished" podID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerID="e0badb7ce14b685f090002a5b47794913e81e794ec5d65f79e999e088a621874" exitCode=143 Dec 03 07:46:49 crc kubenswrapper[4612]: I1203 07:46:49.344093 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerDied","Data":"e0badb7ce14b685f090002a5b47794913e81e794ec5d65f79e999e088a621874"} Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.353631 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f68a55b9-312b-42b5-a0ac-ffe92a4e81b8","Type":"ContainerStarted","Data":"d3759bbdd40ed1946ac4b1a8c76dd2a2e1b0d7f0ae9f020447e0c81ac7e617dc"} Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.383314 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.38329348 podStartE2EDuration="3.38329348s" podCreationTimestamp="2025-12-03 07:46:47 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:50.376297947 +0000 UTC m=+1173.549655397" watchObservedRunningTime="2025-12-03 07:46:50.38329348 +0000 UTC m=+1173.556650900" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.778749 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.805121 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.886149 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-m5ff7"] Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.887713 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.893721 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.895239 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-pjkn4" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.896173 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.904114 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-m5ff7"] Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.948389 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.948443 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.948537 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:50 crc kubenswrapper[4612]: I1203 07:46:50.948697 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.050575 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.050938 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.051051 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.051147 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.065036 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.065295 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.065506 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.089569 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht\") pod \"nova-cell0-conductor-db-sync-m5ff7\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.206651 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.320416 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 07:46:51 crc kubenswrapper[4612]: I1203 07:46:51.775930 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-m5ff7"] Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.230435 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": dial tcp 10.217.0.150:9292: connect: connection refused" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.231000 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": dial tcp 10.217.0.150:9292: connect: connection refused" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.415738 4612 generic.go:334] "Generic (PLEG): container finished" podID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerID="f6d236d170ce6dec577c0f5c323df170cd9f609ae91ddf240e73b257852e47c3" exitCode=0 Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.416030 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerDied","Data":"f6d236d170ce6dec577c0f5c323df170cd9f609ae91ddf240e73b257852e47c3"} Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.422208 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" event={"ID":"304d081f-b185-4106-b4e1-56b1bdf60a8f","Type":"ContainerStarted","Data":"9704ad8a552265f14fc4d68264ac447eec311da4288b0adf9b3c09c58873e736"} Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.676033 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779471 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779788 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779810 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779845 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779885 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.779960 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rdpw\" (UniqueName: \"kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.780053 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.780134 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run\") pod \"985c25c3-b874-4132-a5d8-366ab5e0fab6\" (UID: \"985c25c3-b874-4132-a5d8-366ab5e0fab6\") " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.780642 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs" (OuterVolumeSpecName: "logs") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.781043 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.807641 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw" (OuterVolumeSpecName: "kube-api-access-8rdpw") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "kube-api-access-8rdpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.822452 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts" (OuterVolumeSpecName: "scripts") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.822553 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.836012 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.870113 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data" (OuterVolumeSpecName: "config-data") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.874154 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "985c25c3-b874-4132-a5d8-366ab5e0fab6" (UID: "985c25c3-b874-4132-a5d8-366ab5e0fab6"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881765 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881802 4612 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881844 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881858 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881870 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881884 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rdpw\" (UniqueName: \"kubernetes.io/projected/985c25c3-b874-4132-a5d8-366ab5e0fab6-kube-api-access-8rdpw\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881895 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985c25c3-b874-4132-a5d8-366ab5e0fab6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.881905 4612 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/985c25c3-b874-4132-a5d8-366ab5e0fab6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.927063 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 07:46:52 crc kubenswrapper[4612]: I1203 07:46:52.984224 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.435653 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"985c25c3-b874-4132-a5d8-366ab5e0fab6","Type":"ContainerDied","Data":"ef7440fc8cb442af497d26ab77fba2a5f1108a845292cf3264265ccced90e311"} Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.435708 4612 scope.go:117] "RemoveContainer" containerID="f6d236d170ce6dec577c0f5c323df170cd9f609ae91ddf240e73b257852e47c3" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.435861 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.463537 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.474590 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.498317 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:53 crc kubenswrapper[4612]: E1203 07:46:53.502437 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-log" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.502475 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-log" Dec 03 07:46:53 crc kubenswrapper[4612]: E1203 07:46:53.502521 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-httpd" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.502531 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-httpd" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.503305 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-httpd" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.503324 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" containerName="glance-log" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.520783 4612 scope.go:117] "RemoveContainer" containerID="e0badb7ce14b685f090002a5b47794913e81e794ec5d65f79e999e088a621874" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.536742 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.562301 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.570105 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.570118 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.626300 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.626553 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.626657 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwn8r\" (UniqueName: \"kubernetes.io/projected/33add78e-3ec1-42dd-90aa-9df9f53028b3-kube-api-access-nwn8r\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.626759 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.626933 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.627074 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.627186 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.627290 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-logs\") pod \"glance-default-external-api-0\" 
(UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.728988 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729358 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729393 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729163 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729436 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-logs\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729573 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729598 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729629 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwn8r\" (UniqueName: \"kubernetes.io/projected/33add78e-3ec1-42dd-90aa-9df9f53028b3-kube-api-access-nwn8r\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.729655 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " 
pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.736504 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-logs\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.741163 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33add78e-3ec1-42dd-90aa-9df9f53028b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.748856 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.749655 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.752373 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.755505 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33add78e-3ec1-42dd-90aa-9df9f53028b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.758036 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwn8r\" (UniqueName: \"kubernetes.io/projected/33add78e-3ec1-42dd-90aa-9df9f53028b3-kube-api-access-nwn8r\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.824862 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"33add78e-3ec1-42dd-90aa-9df9f53028b3\") " pod="openstack/glance-default-external-api-0" Dec 03 07:46:53 crc kubenswrapper[4612]: I1203 07:46:53.888323 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 07:46:54 crc kubenswrapper[4612]: I1203 07:46:54.559295 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 07:46:55 crc kubenswrapper[4612]: I1203 07:46:55.106954 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985c25c3-b874-4132-a5d8-366ab5e0fab6" path="/var/lib/kubelet/pods/985c25c3-b874-4132-a5d8-366ab5e0fab6/volumes" Dec 03 07:46:55 crc kubenswrapper[4612]: I1203 07:46:55.485566 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"33add78e-3ec1-42dd-90aa-9df9f53028b3","Type":"ContainerStarted","Data":"94f3732c0b954f300489bd3be1a128d35915dd2610372b8e52d1a3489cd2f504"} Dec 03 07:46:55 crc kubenswrapper[4612]: I1203 07:46:55.485618 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"33add78e-3ec1-42dd-90aa-9df9f53028b3","Type":"ContainerStarted","Data":"360d9aeac6f6d3da9fefc64441f0ded532d53cfd86d986414585605f3910aff1"} Dec 03 07:46:56 crc kubenswrapper[4612]: I1203 07:46:56.499482 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"33add78e-3ec1-42dd-90aa-9df9f53028b3","Type":"ContainerStarted","Data":"3324443085d40dfe63ed5b4cd114656475dd2d0311ca42f772eb5ce5f238ebe6"} Dec 03 07:46:56 crc kubenswrapper[4612]: I1203 07:46:56.520309 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.5202711190000002 podStartE2EDuration="3.520271119s" podCreationTimestamp="2025-12-03 07:46:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:46:56.516877005 +0000 UTC m=+1179.690234405" watchObservedRunningTime="2025-12-03 07:46:56.520271119 +0000 UTC m=+1179.693628529" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.005889 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.006245 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.036563 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.051448 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.519364 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:46:58 crc kubenswrapper[4612]: I1203 07:46:58.519399 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 07:47:00 crc kubenswrapper[4612]: I1203 07:47:00.778253 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:47:00 crc kubenswrapper[4612]: I1203 
07:47:00.805331 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55fc5c6c94-pjh5s" podUID="29d52104-a465-4ca0-a040-d9dba9e47600" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 03 07:47:01 crc kubenswrapper[4612]: I1203 07:47:01.043497 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 07:47:01 crc kubenswrapper[4612]: I1203 07:47:01.043593 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:47:01 crc kubenswrapper[4612]: I1203 07:47:01.070561 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 07:47:03 crc kubenswrapper[4612]: I1203 07:47:03.889335 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 07:47:03 crc kubenswrapper[4612]: I1203 07:47:03.889828 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 07:47:03 crc kubenswrapper[4612]: I1203 07:47:03.953458 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 07:47:03 crc kubenswrapper[4612]: I1203 07:47:03.955497 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 07:47:04 crc kubenswrapper[4612]: I1203 07:47:04.586188 4612 generic.go:334] "Generic (PLEG): container finished" podID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerID="6e0a33bdde4cc42f2d284c011e4f75a9b9bb2e96987f2286cb5d78e984db5bda" exitCode=137 Dec 03 07:47:04 crc kubenswrapper[4612]: I1203 07:47:04.586389 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerDied","Data":"6e0a33bdde4cc42f2d284c011e4f75a9b9bb2e96987f2286cb5d78e984db5bda"} Dec 03 07:47:04 crc kubenswrapper[4612]: I1203 07:47:04.587038 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:47:04 crc kubenswrapper[4612]: I1203 07:47:04.587232 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 07:47:04 crc kubenswrapper[4612]: I1203 07:47:04.972533 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.049459 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j6nb\" (UniqueName: \"kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050078 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050201 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050225 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050308 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050329 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050355 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle\") pod \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\" (UID: \"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c\") " Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.050772 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.051099 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.051138 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.054254 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts" (OuterVolumeSpecName: "scripts") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.054352 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb" (OuterVolumeSpecName: "kube-api-access-6j6nb") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "kube-api-access-6j6nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.081029 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.155725 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.156043 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.156053 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.156061 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j6nb\" (UniqueName: \"kubernetes.io/projected/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-kube-api-access-6j6nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.165495 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.188038 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data" (OuterVolumeSpecName: "config-data") pod "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" (UID: "89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.257988 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.258019 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.598034 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" event={"ID":"304d081f-b185-4106-b4e1-56b1bdf60a8f","Type":"ContainerStarted","Data":"b31385f605e860af4bac5da70cfe14abba501d6a181fcc48d734ff66902bab4c"} Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.601852 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.602124 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c","Type":"ContainerDied","Data":"0a1a17a5c5b80643d25a985675d542a66d97c311f15c58c65cd6e0829e15d45e"} Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.602504 4612 scope.go:117] "RemoveContainer" containerID="6e0a33bdde4cc42f2d284c011e4f75a9b9bb2e96987f2286cb5d78e984db5bda" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.622498 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" podStartSLOduration=2.457842094 podStartE2EDuration="15.62248013s" podCreationTimestamp="2025-12-03 07:46:50 +0000 UTC" firstStartedPulling="2025-12-03 07:46:51.791584317 +0000 UTC m=+1174.964941717" lastFinishedPulling="2025-12-03 07:47:04.956222353 +0000 UTC m=+1188.129579753" observedRunningTime="2025-12-03 07:47:05.616459682 +0000 UTC m=+1188.789817102" watchObservedRunningTime="2025-12-03 07:47:05.62248013 +0000 UTC m=+1188.795837530" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.634191 4612 scope.go:117] "RemoveContainer" containerID="1a50efea835451c97ea6190e5fa7f89735d966c21ab5ee910d8453c61e545b0c" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.671255 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.688117 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.697037 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:05 crc kubenswrapper[4612]: E1203 07:47:05.697643 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-notification-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.697739 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-notification-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: E1203 07:47:05.697812 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="sg-core" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.697863 4612 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="sg-core" Dec 03 07:47:05 crc kubenswrapper[4612]: E1203 07:47:05.697954 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-central-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698026 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-central-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: E1203 07:47:05.698102 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="proxy-httpd" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698161 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="proxy-httpd" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698397 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="proxy-httpd" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698486 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-central-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698571 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="sg-core" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.698665 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" containerName="ceilometer-notification-agent" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.700673 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.704913 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.705299 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.705050 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.718508 4612 scope.go:117] "RemoveContainer" containerID="c94b9407bcaedcc29ef406c2261b439c451ecdbfc29c35428a5715365fc13fd0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.757689 4612 scope.go:117] "RemoveContainer" containerID="c1238e0e10df990d51a5392e1ac8cc525b17caae14e5a022cd268c25852865a5" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767337 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767386 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767692 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767745 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767797 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.767892 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c4mf\" (UniqueName: \"kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.768023 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 
07:47:05.870065 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870125 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c4mf\" (UniqueName: \"kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870170 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870208 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870234 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870310 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.870355 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.872095 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.872209 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.880750 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.881500 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.886644 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.895882 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:05 crc kubenswrapper[4612]: I1203 07:47:05.905633 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c4mf\" (UniqueName: \"kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf\") pod \"ceilometer-0\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " pod="openstack/ceilometer-0" Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.034041 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.515059 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.612688 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerStarted","Data":"ad11456dad5c8a977603c939250508ce9f187e5fe46a4b3f2d1208f0dc4dea4d"} Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.948184 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.948509 4612 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 07:47:06 crc kubenswrapper[4612]: I1203 07:47:06.980719 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 07:47:07 crc kubenswrapper[4612]: I1203 07:47:07.117180 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c" path="/var/lib/kubelet/pods/89f4a188-cff1-4c02-8d0a-7fdd43bf2b6c/volumes" Dec 03 07:47:07 crc kubenswrapper[4612]: I1203 07:47:07.645851 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerStarted","Data":"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752"} Dec 03 07:47:08 crc kubenswrapper[4612]: I1203 07:47:08.655206 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerStarted","Data":"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b"} Dec 03 07:47:08 crc kubenswrapper[4612]: I1203 07:47:08.879208 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:09 crc kubenswrapper[4612]: I1203 07:47:09.682480 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerStarted","Data":"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6"} Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.699420 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerStarted","Data":"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1"} Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.699646 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-central-agent" containerID="cri-o://fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752" gracePeriod=30 Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.699755 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.700122 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="proxy-httpd" containerID="cri-o://f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1" gracePeriod=30 Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.700311 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="sg-core" containerID="cri-o://22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6" gracePeriod=30 Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.700327 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-notification-agent" containerID="cri-o://20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b" gracePeriod=30 Dec 03 07:47:10 crc kubenswrapper[4612]: I1203 07:47:10.731637 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.592656325 podStartE2EDuration="5.731616142s" podCreationTimestamp="2025-12-03 07:47:05 +0000 UTC" firstStartedPulling="2025-12-03 07:47:06.524274103 +0000 UTC m=+1189.697631503" lastFinishedPulling="2025-12-03 07:47:09.66323392 +0000 UTC m=+1192.836591320" observedRunningTime="2025-12-03 07:47:10.73152173 +0000 UTC m=+1193.904879130" watchObservedRunningTime="2025-12-03 07:47:10.731616142 +0000 UTC m=+1193.904973542" Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709840 4612 generic.go:334] "Generic (PLEG): container finished" podID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerID="f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1" exitCode=0 Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709870 4612 generic.go:334] "Generic (PLEG): container finished" podID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerID="22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6" exitCode=2 Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709877 4612 generic.go:334] "Generic (PLEG): container finished" podID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerID="20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b" exitCode=0 Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709896 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerDied","Data":"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1"} Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709921 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerDied","Data":"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6"} Dec 03 07:47:11 crc kubenswrapper[4612]: I1203 07:47:11.709930 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerDied","Data":"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b"} Dec 03 07:47:14 crc kubenswrapper[4612]: I1203 07:47:14.058645 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:47:14 crc kubenswrapper[4612]: I1203 07:47:14.155051 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:47:15 crc kubenswrapper[4612]: I1203 07:47:15.870882 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-55fc5c6c94-pjh5s" Dec 03 07:47:15 crc kubenswrapper[4612]: I1203 07:47:15.940068 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:47:15 crc kubenswrapper[4612]: I1203 07:47:15.940652 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon-log" containerID="cri-o://5f382c79b6c5dc2c810237d3351ca896247a02c86f54a420b90aa489990e83c8" gracePeriod=30 Dec 03 07:47:15 crc kubenswrapper[4612]: I1203 07:47:15.943892 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" containerID="cri-o://6845b3347b561dff2c7aaf840fb49d55a2770102726dd8f15710394a1f37b660" gracePeriod=30 Dec 03 07:47:15 crc kubenswrapper[4612]: I1203 07:47:15.952887 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Dec 03 07:47:18 crc kubenswrapper[4612]: I1203 07:47:18.794234 4612 generic.go:334] "Generic (PLEG): container finished" podID="304d081f-b185-4106-b4e1-56b1bdf60a8f" containerID="b31385f605e860af4bac5da70cfe14abba501d6a181fcc48d734ff66902bab4c" exitCode=0 Dec 03 07:47:18 crc kubenswrapper[4612]: I1203 07:47:18.794562 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" event={"ID":"304d081f-b185-4106-b4e1-56b1bdf60a8f","Type":"ContainerDied","Data":"b31385f605e860af4bac5da70cfe14abba501d6a181fcc48d734ff66902bab4c"} Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.222218 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.351647 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht\") pod \"304d081f-b185-4106-b4e1-56b1bdf60a8f\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.351810 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data\") pod \"304d081f-b185-4106-b4e1-56b1bdf60a8f\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.351843 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle\") pod \"304d081f-b185-4106-b4e1-56b1bdf60a8f\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.351920 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts\") pod \"304d081f-b185-4106-b4e1-56b1bdf60a8f\" (UID: \"304d081f-b185-4106-b4e1-56b1bdf60a8f\") " Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.360276 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts" (OuterVolumeSpecName: "scripts") pod "304d081f-b185-4106-b4e1-56b1bdf60a8f" (UID: "304d081f-b185-4106-b4e1-56b1bdf60a8f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.367141 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht" (OuterVolumeSpecName: "kube-api-access-rw5ht") pod "304d081f-b185-4106-b4e1-56b1bdf60a8f" (UID: "304d081f-b185-4106-b4e1-56b1bdf60a8f"). InnerVolumeSpecName "kube-api-access-rw5ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.378283 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "304d081f-b185-4106-b4e1-56b1bdf60a8f" (UID: "304d081f-b185-4106-b4e1-56b1bdf60a8f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.384108 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data" (OuterVolumeSpecName: "config-data") pod "304d081f-b185-4106-b4e1-56b1bdf60a8f" (UID: "304d081f-b185-4106-b4e1-56b1bdf60a8f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.392969 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:36656->10.217.0.145:8443: read: connection reset by peer" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.453736 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/304d081f-b185-4106-b4e1-56b1bdf60a8f-kube-api-access-rw5ht\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.453774 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.453784 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.453793 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/304d081f-b185-4106-b4e1-56b1bdf60a8f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.777337 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.813806 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" event={"ID":"304d081f-b185-4106-b4e1-56b1bdf60a8f","Type":"ContainerDied","Data":"9704ad8a552265f14fc4d68264ac447eec311da4288b0adf9b3c09c58873e736"} Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.813845 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9704ad8a552265f14fc4d68264ac447eec311da4288b0adf9b3c09c58873e736" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.813896 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-m5ff7" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.821048 4612 generic.go:334] "Generic (PLEG): container finished" podID="7462fb55-15b7-4416-b34f-23893766b5ed" containerID="6845b3347b561dff2c7aaf840fb49d55a2770102726dd8f15710394a1f37b660" exitCode=0 Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.821099 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerDied","Data":"6845b3347b561dff2c7aaf840fb49d55a2770102726dd8f15710394a1f37b660"} Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.821137 4612 scope.go:117] "RemoveContainer" containerID="4ab7257fb81ce6e4c6ccbe3f788b4aeba206cf28cb86b4855a2f3df4b1918a01" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.934436 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 07:47:20 crc kubenswrapper[4612]: E1203 07:47:20.934841 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="304d081f-b185-4106-b4e1-56b1bdf60a8f" containerName="nova-cell0-conductor-db-sync" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.934859 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="304d081f-b185-4106-b4e1-56b1bdf60a8f" containerName="nova-cell0-conductor-db-sync" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.935106 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="304d081f-b185-4106-b4e1-56b1bdf60a8f" containerName="nova-cell0-conductor-db-sync" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.935852 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.941393 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.941637 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-pjkn4" Dec 03 07:47:20 crc kubenswrapper[4612]: I1203 07:47:20.954132 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.064172 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpf2t\" (UniqueName: \"kubernetes.io/projected/e95122bf-4ece-43ec-9974-c8388713d7d6-kube-api-access-vpf2t\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.064352 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.064498 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 
07:47:21.165745 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpf2t\" (UniqueName: \"kubernetes.io/projected/e95122bf-4ece-43ec-9974-c8388713d7d6-kube-api-access-vpf2t\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.165828 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.165866 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.171843 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.173212 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95122bf-4ece-43ec-9974-c8388713d7d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.188814 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpf2t\" (UniqueName: \"kubernetes.io/projected/e95122bf-4ece-43ec-9974-c8388713d7d6-kube-api-access-vpf2t\") pod \"nova-cell0-conductor-0\" (UID: \"e95122bf-4ece-43ec-9974-c8388713d7d6\") " pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.268997 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.703240 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.837792 4612 generic.go:334] "Generic (PLEG): container finished" podID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerID="fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752" exitCode=0 Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.837989 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerDied","Data":"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752"} Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.838418 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c450e6d-fa9c-4094-a107-ea7790ef7ae9","Type":"ContainerDied","Data":"ad11456dad5c8a977603c939250508ce9f187e5fe46a4b3f2d1208f0dc4dea4d"} Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.838499 4612 scope.go:117] "RemoveContainer" containerID="f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.838120 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.868927 4612 scope.go:117] "RemoveContainer" containerID="22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.869347 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.883976 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c4mf\" (UniqueName: \"kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884071 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884166 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884252 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884302 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884328 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.884356 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml\") pod \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\" (UID: \"1c450e6d-fa9c-4094-a107-ea7790ef7ae9\") " Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.891464 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.891729 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.894061 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts" (OuterVolumeSpecName: "scripts") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.896064 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf" (OuterVolumeSpecName: "kube-api-access-6c4mf") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "kube-api-access-6c4mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.900492 4612 scope.go:117] "RemoveContainer" containerID="20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.922756 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.933277 4612 scope.go:117] "RemoveContainer" containerID="fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.956838 4612 scope.go:117] "RemoveContainer" containerID="f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1" Dec 03 07:47:21 crc kubenswrapper[4612]: E1203 07:47:21.957508 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1\": container with ID starting with f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1 not found: ID does not exist" containerID="f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.957568 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1"} err="failed to get container status \"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1\": rpc error: code = NotFound desc = could not find container \"f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1\": container with ID starting with f3a6365c98a3c36cf3ff62262782c38a9f6182c563b6b0570ac8b6ecea1c27f1 not found: ID does not exist" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.957598 4612 scope.go:117] "RemoveContainer" containerID="22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6" Dec 03 07:47:21 crc kubenswrapper[4612]: E1203 07:47:21.958072 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6\": container with ID starting with 22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6 not found: ID does not exist" containerID="22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.958109 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6"} err="failed to get container status \"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6\": rpc error: code = NotFound desc = could not find container \"22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6\": container with ID starting with 22bc3c450c701387a5d81a0b92d177fba25cdbf673b70c5f0cce9d07f8e7cae6 not found: ID does not exist" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.958133 4612 scope.go:117] "RemoveContainer" containerID="20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b" Dec 03 07:47:21 crc kubenswrapper[4612]: E1203 07:47:21.958988 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b\": container with ID starting with 20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b not found: ID does not exist" containerID="20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.959016 4612 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b"} err="failed to get container status \"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b\": rpc error: code = NotFound desc = could not find container \"20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b\": container with ID starting with 20599ef254c1a171961bdacb56d3b8573c1a468134aba9219807bf7b6e146b5b not found: ID does not exist" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.959029 4612 scope.go:117] "RemoveContainer" containerID="fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752" Dec 03 07:47:21 crc kubenswrapper[4612]: E1203 07:47:21.959875 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752\": container with ID starting with fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752 not found: ID does not exist" containerID="fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.959929 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752"} err="failed to get container status \"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752\": rpc error: code = NotFound desc = could not find container \"fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752\": container with ID starting with fc18733e6c158a031f52f6653b9d66996ddd3d654bb807f1305c9a54d5227752 not found: ID does not exist" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.965380 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988572 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988601 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988610 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988619 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988627 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c4mf\" (UniqueName: \"kubernetes.io/projected/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-kube-api-access-6c4mf\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:21 crc kubenswrapper[4612]: I1203 07:47:21.988636 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.004045 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data" (OuterVolumeSpecName: "config-data") pod "1c450e6d-fa9c-4094-a107-ea7790ef7ae9" (UID: "1c450e6d-fa9c-4094-a107-ea7790ef7ae9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.090073 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c450e6d-fa9c-4094-a107-ea7790ef7ae9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.174012 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.187073 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.222642 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:22 crc kubenswrapper[4612]: E1203 07:47:22.223218 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="proxy-httpd" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.223295 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="proxy-httpd" Dec 03 07:47:22 crc kubenswrapper[4612]: E1203 07:47:22.223385 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="sg-core" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.223436 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="sg-core" Dec 03 07:47:22 crc kubenswrapper[4612]: E1203 07:47:22.223498 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-central-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.223549 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-central-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: E1203 07:47:22.223597 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-notification-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.223651 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-notification-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.223921 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-central-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.224005 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="proxy-httpd" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.224078 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="ceilometer-notification-agent" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.224149 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" containerName="sg-core" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.225740 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.233099 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.233826 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.234310 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294169 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294222 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294237 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294262 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294284 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lm6b\" (UniqueName: \"kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294336 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.294384 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.394920 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395027 4612 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395053 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395068 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395090 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395111 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lm6b\" (UniqueName: \"kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395161 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395660 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.395836 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.406857 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.407733 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.407765 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.422843 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.440585 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lm6b\" (UniqueName: \"kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b\") pod \"ceilometer-0\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.572111 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.875269 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e95122bf-4ece-43ec-9974-c8388713d7d6","Type":"ContainerStarted","Data":"a0bd821bad8b7a1bf8efa61acdacbf827792da7aab66dac185544a38352b7db6"} Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.875508 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e95122bf-4ece-43ec-9974-c8388713d7d6","Type":"ContainerStarted","Data":"43f4e3ed15c1c8285941ede27aefd1ef6eb9974f92c32956af516b36e90979b1"} Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.875536 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:22 crc kubenswrapper[4612]: I1203 07:47:22.909018 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.909004414 podStartE2EDuration="2.909004414s" podCreationTimestamp="2025-12-03 07:47:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:22.908398749 +0000 UTC m=+1206.081756149" watchObservedRunningTime="2025-12-03 07:47:22.909004414 +0000 UTC m=+1206.082361814" Dec 03 07:47:23 crc kubenswrapper[4612]: I1203 07:47:23.103911 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c450e6d-fa9c-4094-a107-ea7790ef7ae9" path="/var/lib/kubelet/pods/1c450e6d-fa9c-4094-a107-ea7790ef7ae9/volumes" Dec 03 07:47:23 crc kubenswrapper[4612]: I1203 07:47:23.195570 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:23 crc kubenswrapper[4612]: I1203 07:47:23.884073 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerStarted","Data":"70cf633d885a11a761cf80b91ef6a1be9d17be4c66ea4b3aac262844253a9f83"} Dec 03 07:47:24 crc kubenswrapper[4612]: I1203 07:47:24.897002 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerStarted","Data":"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d"} Dec 03 07:47:24 crc kubenswrapper[4612]: I1203 07:47:24.897360 4612 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerStarted","Data":"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967"} Dec 03 07:47:25 crc kubenswrapper[4612]: I1203 07:47:25.906587 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerStarted","Data":"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a"} Dec 03 07:47:26 crc kubenswrapper[4612]: I1203 07:47:26.916028 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerStarted","Data":"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816"} Dec 03 07:47:26 crc kubenswrapper[4612]: I1203 07:47:26.917544 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:47:26 crc kubenswrapper[4612]: I1203 07:47:26.935535 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.623504055 podStartE2EDuration="4.935519862s" podCreationTimestamp="2025-12-03 07:47:22 +0000 UTC" firstStartedPulling="2025-12-03 07:47:23.218477848 +0000 UTC m=+1206.391835248" lastFinishedPulling="2025-12-03 07:47:26.530493665 +0000 UTC m=+1209.703851055" observedRunningTime="2025-12-03 07:47:26.934962538 +0000 UTC m=+1210.108319948" watchObservedRunningTime="2025-12-03 07:47:26.935519862 +0000 UTC m=+1210.108877262" Dec 03 07:47:30 crc kubenswrapper[4612]: I1203 07:47:30.777193 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 07:47:31 crc kubenswrapper[4612]: I1203 07:47:31.316062 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.053528 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-q746f"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.056423 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.061205 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.063792 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q746f"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.084638 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.195254 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrgr8\" (UniqueName: \"kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.195340 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.195400 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.195444 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.276734 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.278200 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.280860 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.297926 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrgr8\" (UniqueName: \"kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.298026 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.298078 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.298110 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.301504 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.308715 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.308744 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.311533 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.380644 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrgr8\" (UniqueName: \"kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8\") pod \"nova-cell0-cell-mapping-q746f\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.382348 4612 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.415064 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gcl2\" (UniqueName: \"kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.415121 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.415198 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.415244 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.520616 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.520679 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.520782 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.520810 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gcl2\" (UniqueName: \"kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.523437 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.539657 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.541573 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.549299 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.574665 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.598075 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gcl2\" (UniqueName: \"kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2\") pod \"nova-api-0\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.602055 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.604059 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.605513 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.618314 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.647046 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.678098 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.707252 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.718799 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.733753 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.735099 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.735248 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.735409 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.738312 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.738438 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6dqn\" (UniqueName: \"kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.738581 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx9dj\" (UniqueName: \"kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.746596 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840124 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840192 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6dqn\" (UniqueName: \"kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 
07:47:32.840251 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx9dj\" (UniqueName: \"kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840285 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840305 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840362 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840385 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr6l6\" (UniqueName: \"kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840407 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840469 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.840514 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.863836 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.864889 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6dqn\" (UniqueName: 
\"kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.866142 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.868125 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.868448 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.868599 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.869598 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx9dj\" (UniqueName: \"kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.871465 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data\") pod \"nova-scheduler-0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.882129 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.891923 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.944574 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.944628 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr6l6\" (UniqueName: \"kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.944687 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.944725 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.946781 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.950585 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.954749 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.969054 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr6l6\" (UniqueName: \"kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6\") pod \"nova-metadata-0\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " pod="openstack/nova-metadata-0" Dec 03 07:47:32 crc kubenswrapper[4612]: I1203 07:47:32.986372 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.021245 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.049810 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.049880 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.049900 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.049921 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.049994 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kwwh\" (UniqueName: \"kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.050012 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.116460 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.152384 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158482 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158519 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158545 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158625 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kwwh\" (UniqueName: \"kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158643 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.159318 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.158370 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.159857 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 
07:47:33.160398 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj"
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.166474 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj"
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.206275 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kwwh\" (UniqueName: \"kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh\") pod \"dnsmasq-dns-865f5d856f-rzcgj\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " pod="openstack/dnsmasq-dns-865f5d856f-rzcgj"
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.289182 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q746f"]
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.490912 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj"
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.593763 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.795912 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 03 07:47:33 crc kubenswrapper[4612]: I1203 07:47:33.822435 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 03 07:47:33 crc kubenswrapper[4612]: W1203 07:47:33.846837 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf876fbab_9c17_44f0_b708_27d573e171e8.slice/crio-d4486605f41aeec35170d01b01b11be7efc4147eb984c69b43b267eea7974232 WatchSource:0}: Error finding container d4486605f41aeec35170d01b01b11be7efc4147eb984c69b43b267eea7974232: Status 404 returned error can't find the container with id d4486605f41aeec35170d01b01b11be7efc4147eb984c69b43b267eea7974232
Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.018718 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f876fbab-9c17-44f0-b708-27d573e171e8","Type":"ContainerStarted","Data":"d4486605f41aeec35170d01b01b11be7efc4147eb984c69b43b267eea7974232"}
Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.021269 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q746f" event={"ID":"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd","Type":"ContainerStarted","Data":"c2c9d678cfcac6cd47687aaf57705cf766653520efd2f5c46ca38c7a6ea0123f"}
Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.021319 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q746f" event={"ID":"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd","Type":"ContainerStarted","Data":"e872d41f76f6ab2f3893dc9b81b1cdea804507811554f90c503c6e0a39d718dc"}
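The W1203 manager.go entry above is cadvisor's watch racing a container CRI-O had only just created; the PLEG entries that follow are the generic pod lifecycle event generator relisting containers through CRI, diffing against its cache, and feeding ContainerStarted/ContainerDied events into the sync loop. A minimal Go sketch of that relist-and-diff (illustrative types, not the pleg package itself):

package main

import "fmt"

type state int

const (
	created state = iota
	running
	exited
)

type event struct {
	containerID string
	kind        string // "ContainerStarted" or "ContainerDied"
}

// relist diffs the previous container states against the current ones and
// emits lifecycle events, the way the generic PLEG does on each relist tick.
func relist(prev, cur map[string]state) []event {
	var evs []event
	for id, s := range cur {
		old, seen := prev[id]
		switch {
		case s == running && (!seen || old != running):
			evs = append(evs, event{id, "ContainerStarted"})
		case s == exited && (!seen || old == running):
			evs = append(evs, event{id, "ContainerDied"})
		}
	}
	return evs
}

func main() {
	prev := map[string]state{"d4486605f41a": created}
	cur := map[string]state{"d4486605f41a": running}
	for _, e := range relist(prev, cur) {
		fmt.Printf("SyncLoop (PLEG): %s %s\n", e.kind, e.containerID)
	}
}

Because events come from a periodic relist rather than from the runtime pushing notifications, a transient 404 like the one above is harmless: the next relist observes the container and emits ContainerStarted, as happens here for d4486605f41ae....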
event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerStarted","Data":"6918b07fab7677dc5fdf2fcceaab4bf592b272c6bb57899e0d846913564237dc"} Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.028382 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d673a97-69bf-4125-9b73-d9b79c8f4ab0","Type":"ContainerStarted","Data":"0a4ff52384e8850751241618d3641947704b37bcf530c5a90a42990f905f56cb"} Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.039966 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s5xlq"] Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.041205 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.046667 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.046723 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.057710 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-q746f" podStartSLOduration=2.057688847 podStartE2EDuration="2.057688847s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:34.038320738 +0000 UTC m=+1217.211678148" watchObservedRunningTime="2025-12-03 07:47:34.057688847 +0000 UTC m=+1217.231046247" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.115604 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s5xlq"] Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.122041 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.122136 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8f7p\" (UniqueName: \"kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.122327 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.122443 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " 
pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.197714 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.209635 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.223231 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.223290 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8f7p\" (UniqueName: \"kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.223349 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.223407 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.226216 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.229192 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.237508 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc kubenswrapper[4612]: I1203 07:47:34.247513 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8f7p\" (UniqueName: \"kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p\") pod \"nova-cell1-conductor-db-sync-s5xlq\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:34 crc 
kubenswrapper[4612]: I1203 07:47:34.365875 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:35 crc kubenswrapper[4612]: I1203 07:47:35.038414 4612 generic.go:334] "Generic (PLEG): container finished" podID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerID="d6be38c30d036590da17c668bd473ba55009655eb1ee2c5f66fab8d9cc346bc7" exitCode=0 Dec 03 07:47:35 crc kubenswrapper[4612]: I1203 07:47:35.038737 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" event={"ID":"bdbc5b1d-28f6-46f0-8cca-4a163110d00e","Type":"ContainerDied","Data":"d6be38c30d036590da17c668bd473ba55009655eb1ee2c5f66fab8d9cc346bc7"} Dec 03 07:47:35 crc kubenswrapper[4612]: I1203 07:47:35.038763 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" event={"ID":"bdbc5b1d-28f6-46f0-8cca-4a163110d00e","Type":"ContainerStarted","Data":"1e121674b70d33f5f67481fb9d4ecbdb7a7ebfbfea978b61132dcb2ebf1d8a1e"} Dec 03 07:47:35 crc kubenswrapper[4612]: I1203 07:47:35.042285 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerStarted","Data":"14f6b676a10ba9a5bf2115faf9fa304902df9d185cbaaa206ddebee12d95df84"} Dec 03 07:47:35 crc kubenswrapper[4612]: I1203 07:47:35.199564 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s5xlq"] Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.054210 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" event={"ID":"a0982d29-f1b5-4dd6-b87b-a70a08e54712","Type":"ContainerStarted","Data":"96231eca110fd3215d16c0bab4de17fae65d15d898792760701a5d38ed1d10cc"} Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.054249 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" event={"ID":"a0982d29-f1b5-4dd6-b87b-a70a08e54712","Type":"ContainerStarted","Data":"a0f1c9b2041e9ab7ccb1b97671f98abc280cb6e80239020a0ebf86485cf7a68b"} Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.057183 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" event={"ID":"bdbc5b1d-28f6-46f0-8cca-4a163110d00e","Type":"ContainerStarted","Data":"531b7597e566158de1081d8456b89e683c201062f7ae557ceb4b334405fd56a8"} Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.057671 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.077160 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" podStartSLOduration=2.07714059 podStartE2EDuration="2.07714059s" podCreationTimestamp="2025-12-03 07:47:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:36.067657256 +0000 UTC m=+1219.241014666" watchObservedRunningTime="2025-12-03 07:47:36.07714059 +0000 UTC m=+1219.250497990" Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.096858 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" podStartSLOduration=4.096839287 podStartE2EDuration="4.096839287s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:36.086615225 +0000 UTC m=+1219.259972655" watchObservedRunningTime="2025-12-03 07:47:36.096839287 +0000 UTC m=+1219.270196707" Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.738475 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:47:36 crc kubenswrapper[4612]: I1203 07:47:36.752146 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.107428 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f876fbab-9c17-44f0-b708-27d573e171e8","Type":"ContainerStarted","Data":"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641"} Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.107859 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f876fbab-9c17-44f0-b708-27d573e171e8" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641" gracePeriod=30 Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.117979 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerStarted","Data":"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74"} Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.118025 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerStarted","Data":"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840"} Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.118037 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-log" containerID="cri-o://6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840" gracePeriod=30 Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.118106 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-metadata" containerID="cri-o://35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74" gracePeriod=30 Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.127535 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.61785776 podStartE2EDuration="7.127522638s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" firstStartedPulling="2025-12-03 07:47:33.853524108 +0000 UTC m=+1217.026881508" lastFinishedPulling="2025-12-03 07:47:38.363188966 +0000 UTC m=+1221.536546386" observedRunningTime="2025-12-03 07:47:39.126567805 +0000 UTC m=+1222.299925205" watchObservedRunningTime="2025-12-03 07:47:39.127522638 +0000 UTC m=+1222.300880038" Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.140563 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerStarted","Data":"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee"} Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 
07:47:39.140605 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerStarted","Data":"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313"}
Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.149794 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d673a97-69bf-4125-9b73-d9b79c8f4ab0","Type":"ContainerStarted","Data":"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5"}
Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.157621 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.970296396 podStartE2EDuration="7.157603592s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" firstStartedPulling="2025-12-03 07:47:34.178147796 +0000 UTC m=+1217.351505206" lastFinishedPulling="2025-12-03 07:47:38.365454982 +0000 UTC m=+1221.538812402" observedRunningTime="2025-12-03 07:47:39.149496212 +0000 UTC m=+1222.322853612" watchObservedRunningTime="2025-12-03 07:47:39.157603592 +0000 UTC m=+1222.330960982"
Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.174858 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.423753479 podStartE2EDuration="7.174843858s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" firstStartedPulling="2025-12-03 07:47:33.614310512 +0000 UTC m=+1216.787667912" lastFinishedPulling="2025-12-03 07:47:38.365400881 +0000 UTC m=+1221.538758291" observedRunningTime="2025-12-03 07:47:39.172512731 +0000 UTC m=+1222.345870131" watchObservedRunningTime="2025-12-03 07:47:39.174843858 +0000 UTC m=+1222.348201258"
Dec 03 07:47:39 crc kubenswrapper[4612]: I1203 07:47:39.203059 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.655805698 podStartE2EDuration="7.203039686s" podCreationTimestamp="2025-12-03 07:47:32 +0000 UTC" firstStartedPulling="2025-12-03 07:47:33.818165803 +0000 UTC m=+1216.991523193" lastFinishedPulling="2025-12-03 07:47:38.365399781 +0000 UTC m=+1221.538757181" observedRunningTime="2025-12-03 07:47:39.197274083 +0000 UTC m=+1222.370631473" watchObservedRunningTime="2025-12-03 07:47:39.203039686 +0000 UTC m=+1222.376397086"
Dec 03 07:47:40 crc kubenswrapper[4612]: I1203 07:47:40.159555 4612 generic.go:334] "Generic (PLEG): container finished" podID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerID="6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840" exitCode=143
Dec 03 07:47:40 crc kubenswrapper[4612]: I1203 07:47:40.159634 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerDied","Data":"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840"}
Dec 03 07:47:40 crc kubenswrapper[4612]: I1203 07:47:40.777814 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-767d79bd88-5spkc" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused"
Dec 03 07:47:42 crc kubenswrapper[4612]: I1203 07:47:42.892621 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
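The pod_startup_latency_tracker entries above compute podStartE2EDuration as observedRunningTime minus podCreationTimestamp, then subtract the image-pull window to get podStartSLOduration, with the pull window taken from the monotonic m=+ clock readings. For nova-metadata-0: 7.157603592s - (1221.538812402 - 1217.351505206)s = 2.970296396s, exactly the podStartSLOduration logged. A small Go check of that arithmetic, using the numbers from the entry above:

package main

import "fmt"

func main() {
	// Figures from the nova-metadata-0 startup-latency entry above.
	e2e := 7.157603592                      // podStartE2EDuration (s): observedRunningTime - podCreationTimestamp
	pull := 1221.538812402 - 1217.351505206 // lastFinishedPulling m=+ minus firstStartedPulling m=+
	fmt.Printf("podStartSLOduration=%.9f\n", e2e-pull) // prints 2.970296396
}

The same identity holds for nova-api-0 and nova-scheduler-0 above. For nova-cell0-cell-mapping-q746f earlier, firstStartedPulling and lastFinishedPulling are the zero time (no pull was needed), so the SLO and E2E durations coincide.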
Dec 03 07:47:42 crc kubenswrapper[4612]: I1203 07:47:42.894390 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 03 07:47:42 crc kubenswrapper[4612]: I1203 07:47:42.987056 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 03 07:47:42 crc kubenswrapper[4612]: I1203 07:47:42.987162 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.021720 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.027238 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.118002 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.118042 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.192177 4612 generic.go:334] "Generic (PLEG): container finished" podID="a0982d29-f1b5-4dd6-b87b-a70a08e54712" containerID="96231eca110fd3215d16c0bab4de17fae65d15d898792760701a5d38ed1d10cc" exitCode=0
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.192195 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" event={"ID":"a0982d29-f1b5-4dd6-b87b-a70a08e54712","Type":"ContainerDied","Data":"96231eca110fd3215d16c0bab4de17fae65d15d898792760701a5d38ed1d10cc"}
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.224580 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.492891 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.608700 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"]
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.608988 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="dnsmasq-dns" containerID="cri-o://c13073aeadea899f842d6536a2fdbbdb1ff96e3954511c5249ed62939427b063" gracePeriod=10
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.977167 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 07:47:43 crc kubenswrapper[4612]: I1203 07:47:43.977825 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.225369 4612 generic.go:334] "Generic (PLEG): container finished" podID="f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" containerID="c2c9d678cfcac6cd47687aaf57705cf766653520efd2f5c46ca38c7a6ea0123f" exitCode=0
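The "Probe failed" entries above are HTTP startup probes timing out against nova-api on 10.217.0.183:8774: kubelet issues a GET and treats any 2xx/3xx status as success, and anything else, including a client-side timeout, as failure. A minimal Go sketch of that check; the 1-second timeout is kubelet's default timeoutSeconds and is assumed here rather than read from the pod spec:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe mimics the httpGet probe check: GET the URL with a hard client
// timeout and map the outcome to success or failure.
func probe(url string, timeout time.Duration) error {
	client := http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return nil
	}
	return fmt.Errorf("unexpected status %d", resp.StatusCode)
}

func main() {
	if err := probe("http://10.217.0.183:8774/", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

A failing startup probe only keeps the pod in the startup phase; the probe="startup" status="started" transition logged for nova-scheduler-0 above is the success path, after which readiness probing takes over.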
Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.225648 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q746f" event={"ID":"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd","Type":"ContainerDied","Data":"c2c9d678cfcac6cd47687aaf57705cf766653520efd2f5c46ca38c7a6ea0123f"} Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.230011 4612 generic.go:334] "Generic (PLEG): container finished" podID="ac049611-199b-46af-ac88-42736bbf522d" containerID="c13073aeadea899f842d6536a2fdbbdb1ff96e3954511c5249ed62939427b063" exitCode=0 Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.230120 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" event={"ID":"ac049611-199b-46af-ac88-42736bbf522d","Type":"ContainerDied","Data":"c13073aeadea899f842d6536a2fdbbdb1ff96e3954511c5249ed62939427b063"} Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.230176 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" event={"ID":"ac049611-199b-46af-ac88-42736bbf522d","Type":"ContainerDied","Data":"a8219a496466337c5da879b9e107a05b0712a5b8c1f38fd0e5d9a67170cdf7be"} Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.230189 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8219a496466337c5da879b9e107a05b0712a5b8c1f38fd0e5d9a67170cdf7be" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.236638 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285364 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285467 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285485 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285527 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285576 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tptqk\" (UniqueName: \"kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.285603 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0\") pod \"ac049611-199b-46af-ac88-42736bbf522d\" (UID: \"ac049611-199b-46af-ac88-42736bbf522d\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.305840 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk" (OuterVolumeSpecName: "kube-api-access-tptqk") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "kube-api-access-tptqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.383274 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.387287 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.387307 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tptqk\" (UniqueName: \"kubernetes.io/projected/ac049611-199b-46af-ac88-42736bbf522d-kube-api-access-tptqk\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.412154 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.414222 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config" (OuterVolumeSpecName: "config") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.431655 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.438893 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ac049611-199b-46af-ac88-42736bbf522d" (UID: "ac049611-199b-46af-ac88-42736bbf522d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.488851 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.488875 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.488886 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.488894 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac049611-199b-46af-ac88-42736bbf522d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.586528 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.691933 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle\") pod \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.692055 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts\") pod \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.692390 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data\") pod \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.692526 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8f7p\" (UniqueName: \"kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p\") pod \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\" (UID: \"a0982d29-f1b5-4dd6-b87b-a70a08e54712\") " Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.696308 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p" (OuterVolumeSpecName: "kube-api-access-x8f7p") pod "a0982d29-f1b5-4dd6-b87b-a70a08e54712" (UID: "a0982d29-f1b5-4dd6-b87b-a70a08e54712"). InnerVolumeSpecName "kube-api-access-x8f7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.698053 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts" (OuterVolumeSpecName: "scripts") pod "a0982d29-f1b5-4dd6-b87b-a70a08e54712" (UID: "a0982d29-f1b5-4dd6-b87b-a70a08e54712"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.726251 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0982d29-f1b5-4dd6-b87b-a70a08e54712" (UID: "a0982d29-f1b5-4dd6-b87b-a70a08e54712"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.732016 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data" (OuterVolumeSpecName: "config-data") pod "a0982d29-f1b5-4dd6-b87b-a70a08e54712" (UID: "a0982d29-f1b5-4dd6-b87b-a70a08e54712"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.795066 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8f7p\" (UniqueName: \"kubernetes.io/projected/a0982d29-f1b5-4dd6-b87b-a70a08e54712-kube-api-access-x8f7p\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.795104 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.795117 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:44 crc kubenswrapper[4612]: I1203 07:47:44.795128 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0982d29-f1b5-4dd6-b87b-a70a08e54712-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.240196 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-j4wbq" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.240195 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s5xlq" event={"ID":"a0982d29-f1b5-4dd6-b87b-a70a08e54712","Type":"ContainerDied","Data":"a0f1c9b2041e9ab7ccb1b97671f98abc280cb6e80239020a0ebf86485cf7a68b"} Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.241182 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0f1c9b2041e9ab7ccb1b97671f98abc280cb6e80239020a0ebf86485cf7a68b" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.240247 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s5xlq"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.272779 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"]
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.285171 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-j4wbq"]
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.300181 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 03 07:47:45 crc kubenswrapper[4612]: E1203 07:47:45.300698 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="init"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.300721 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="init"
Dec 03 07:47:45 crc kubenswrapper[4612]: E1203 07:47:45.300749 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0982d29-f1b5-4dd6-b87b-a70a08e54712" containerName="nova-cell1-conductor-db-sync"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.300758 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0982d29-f1b5-4dd6-b87b-a70a08e54712" containerName="nova-cell1-conductor-db-sync"
Dec 03 07:47:45 crc kubenswrapper[4612]: E1203 07:47:45.300787 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="dnsmasq-dns"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.300798 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="dnsmasq-dns"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.301039 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac049611-199b-46af-ac88-42736bbf522d" containerName="dnsmasq-dns"
Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.301077 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0982d29-f1b5-4dd6-b87b-a70a08e54712" containerName="nova-cell1-conductor-db-sync"
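The E1203 cpu_manager, state_mem, and memory_manager entries above are housekeeping rather than failures: admitting nova-cell1-conductor-0 triggers RemoveStaleState, which drops per-container CPU and memory accounting left behind by the just-deleted dnsmasq-dns-6bb4fc677f-j4wbq and nova-cell1-conductor-db-sync pods. A minimal sketch of that cleanup (illustrative types; kubelet keys its checkpointed state differently):

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops resource assignments whose owning pod is no longer
// active, mirroring what cpu_manager.go:410 / memory_manager.go:354 report.
func removeStaleState(assignments map[key]string, active map[string]bool) {
	for k := range assignments {
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"ac049611-199b-46af-ac88-42736bbf522d", "dnsmasq-dns"}:                  "cpuset 0-3",
		{"a0982d29-f1b5-4dd6-b87b-a70a08e54712", "nova-cell1-conductor-db-sync"}: "cpuset 0-3",
	}
	active := map[string]bool{} // both owning pods were just deleted
	removeStaleState(assignments, active)
}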
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.307037 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.311188 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.408007 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpmwz\" (UniqueName: \"kubernetes.io/projected/cee4bc96-2455-41b3-a3cc-d743fab6e761-kube-api-access-xpmwz\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.408075 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.408157 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.518122 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.518282 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpmwz\" (UniqueName: \"kubernetes.io/projected/cee4bc96-2455-41b3-a3cc-d743fab6e761-kube-api-access-xpmwz\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.518379 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.539898 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.542746 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee4bc96-2455-41b3-a3cc-d743fab6e761-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.543256 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpmwz\" (UniqueName: \"kubernetes.io/projected/cee4bc96-2455-41b3-a3cc-d743fab6e761-kube-api-access-xpmwz\") pod \"nova-cell1-conductor-0\" (UID: \"cee4bc96-2455-41b3-a3cc-d743fab6e761\") " pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.624827 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.636303 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.735238 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts\") pod \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.735996 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrgr8\" (UniqueName: \"kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8\") pod \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.736068 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data\") pod \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.736189 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle\") pod \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\" (UID: \"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd\") " Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.747972 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts" (OuterVolumeSpecName: "scripts") pod "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" (UID: "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.766614 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8" (OuterVolumeSpecName: "kube-api-access-nrgr8") pod "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" (UID: "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd"). InnerVolumeSpecName "kube-api-access-nrgr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.812098 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data" (OuterVolumeSpecName: "config-data") pod "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" (UID: "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.812124 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" (UID: "f97e1ed8-5ec1-4b29-bb43-8ff083052bfd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.840528 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.840562 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrgr8\" (UniqueName: \"kubernetes.io/projected/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-kube-api-access-nrgr8\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.840577 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:45 crc kubenswrapper[4612]: I1203 07:47:45.840596 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.152968 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.258340 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cee4bc96-2455-41b3-a3cc-d743fab6e761","Type":"ContainerStarted","Data":"57046f034c5bf806a4ad91e468aa6afe7a4bb09aab6f1a86e774596211ea8ba3"} Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.270273 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q746f" event={"ID":"f97e1ed8-5ec1-4b29-bb43-8ff083052bfd","Type":"ContainerDied","Data":"e872d41f76f6ab2f3893dc9b81b1cdea804507811554f90c503c6e0a39d718dc"} Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.270328 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e872d41f76f6ab2f3893dc9b81b1cdea804507811554f90c503c6e0a39d718dc" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.270410 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q746f" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.273650 4612 generic.go:334] "Generic (PLEG): container finished" podID="7462fb55-15b7-4416-b34f-23893766b5ed" containerID="5f382c79b6c5dc2c810237d3351ca896247a02c86f54a420b90aa489990e83c8" exitCode=137 Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.273681 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerDied","Data":"5f382c79b6c5dc2c810237d3351ca896247a02c86f54a420b90aa489990e83c8"} Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.320152 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.431393 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.431621 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-log" containerID="cri-o://a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313" gracePeriod=30 Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.431818 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-api" containerID="cri-o://93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee" gracePeriod=30 Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.446483 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.446669 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerName="nova-scheduler-scheduler" containerID="cri-o://9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" gracePeriod=30 Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.455010 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.455288 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.455323 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8xmq\" (UniqueName: \"kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.455410 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.455453 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.456036 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: 
I1203 07:47:46.456060 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs" (OuterVolumeSpecName: "logs") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.456089 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle\") pod \"7462fb55-15b7-4416-b34f-23893766b5ed\" (UID: \"7462fb55-15b7-4416-b34f-23893766b5ed\") " Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.456440 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7462fb55-15b7-4416-b34f-23893766b5ed-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.464938 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq" (OuterVolumeSpecName: "kube-api-access-l8xmq") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "kube-api-access-l8xmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.465217 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.484047 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts" (OuterVolumeSpecName: "scripts") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.491114 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data" (OuterVolumeSpecName: "config-data") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.511318 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.557094 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "7462fb55-15b7-4416-b34f-23893766b5ed" (UID: "7462fb55-15b7-4416-b34f-23893766b5ed"). 
InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558871 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558905 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8xmq\" (UniqueName: \"kubernetes.io/projected/7462fb55-15b7-4416-b34f-23893766b5ed-kube-api-access-l8xmq\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558917 4612 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558926 4612 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558935 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7462fb55-15b7-4416-b34f-23893766b5ed-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:46 crc kubenswrapper[4612]: I1203 07:47:46.558945 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7462fb55-15b7-4416-b34f-23893766b5ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.104288 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac049611-199b-46af-ac88-42736bbf522d" path="/var/lib/kubelet/pods/ac049611-199b-46af-ac88-42736bbf522d/volumes" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.292159 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-767d79bd88-5spkc" event={"ID":"7462fb55-15b7-4416-b34f-23893766b5ed","Type":"ContainerDied","Data":"ecd5a2b56e26e33abfea2e53aca51a098323619eb518b12f77c4f68f7e07cd21"} Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.292203 4612 scope.go:117] "RemoveContainer" containerID="6845b3347b561dff2c7aaf840fb49d55a2770102726dd8f15710394a1f37b660" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.292213 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-767d79bd88-5spkc" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.306764 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"cee4bc96-2455-41b3-a3cc-d743fab6e761","Type":"ContainerStarted","Data":"cc14ea44737fb6f043aa7e3938f5a9cecc94dac7a9ee9c6cfc0a10e405de400b"} Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.306854 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.310892 4612 generic.go:334] "Generic (PLEG): container finished" podID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerID="a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313" exitCode=143 Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.310963 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerDied","Data":"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313"} Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.325775 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.337875 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-767d79bd88-5spkc"] Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.346319 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.346301203 podStartE2EDuration="2.346301203s" podCreationTimestamp="2025-12-03 07:47:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:47.332805089 +0000 UTC m=+1230.506162489" watchObservedRunningTime="2025-12-03 07:47:47.346301203 +0000 UTC m=+1230.519658603" Dec 03 07:47:47 crc kubenswrapper[4612]: I1203 07:47:47.481809 4612 scope.go:117] "RemoveContainer" containerID="5f382c79b6c5dc2c810237d3351ca896247a02c86f54a420b90aa489990e83c8" Dec 03 07:47:47 crc kubenswrapper[4612]: E1203 07:47:47.988629 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:47:47 crc kubenswrapper[4612]: E1203 07:47:47.989704 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:47:47 crc kubenswrapper[4612]: E1203 07:47:47.990739 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:47:47 crc kubenswrapper[4612]: E1203 07:47:47.990774 4612 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , 
stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerName="nova-scheduler-scheduler" Dec 03 07:47:49 crc kubenswrapper[4612]: I1203 07:47:49.104975 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" path="/var/lib/kubelet/pods/7462fb55-15b7-4416-b34f-23893766b5ed/volumes" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.734000 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.845914 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx9dj\" (UniqueName: \"kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj\") pod \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.846343 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data\") pod \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.846470 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle\") pod \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\" (UID: \"0d673a97-69bf-4125-9b73-d9b79c8f4ab0\") " Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.864809 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj" (OuterVolumeSpecName: "kube-api-access-fx9dj") pod "0d673a97-69bf-4125-9b73-d9b79c8f4ab0" (UID: "0d673a97-69bf-4125-9b73-d9b79c8f4ab0"). InnerVolumeSpecName "kube-api-access-fx9dj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.888131 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d673a97-69bf-4125-9b73-d9b79c8f4ab0" (UID: "0d673a97-69bf-4125-9b73-d9b79c8f4ab0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.892406 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data" (OuterVolumeSpecName: "config-data") pod "0d673a97-69bf-4125-9b73-d9b79c8f4ab0" (UID: "0d673a97-69bf-4125-9b73-d9b79c8f4ab0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.948789 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx9dj\" (UniqueName: \"kubernetes.io/projected/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-kube-api-access-fx9dj\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.948857 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:50 crc kubenswrapper[4612]: I1203 07:47:50.948873 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d673a97-69bf-4125-9b73-d9b79c8f4ab0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.215511 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.257232 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gcl2\" (UniqueName: \"kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2\") pod \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.257492 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs\") pod \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.257565 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data\") pod \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.257758 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle\") pod \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\" (UID: \"47ca23ae-a3cf-4fbb-b2d8-41d188321af5\") " Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.258243 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs" (OuterVolumeSpecName: "logs") pod "47ca23ae-a3cf-4fbb-b2d8-41d188321af5" (UID: "47ca23ae-a3cf-4fbb-b2d8-41d188321af5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.271746 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2" (OuterVolumeSpecName: "kube-api-access-9gcl2") pod "47ca23ae-a3cf-4fbb-b2d8-41d188321af5" (UID: "47ca23ae-a3cf-4fbb-b2d8-41d188321af5"). InnerVolumeSpecName "kube-api-access-9gcl2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.287791 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47ca23ae-a3cf-4fbb-b2d8-41d188321af5" (UID: "47ca23ae-a3cf-4fbb-b2d8-41d188321af5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.300465 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data" (OuterVolumeSpecName: "config-data") pod "47ca23ae-a3cf-4fbb-b2d8-41d188321af5" (UID: "47ca23ae-a3cf-4fbb-b2d8-41d188321af5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.347040 4612 generic.go:334] "Generic (PLEG): container finished" podID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" exitCode=0 Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.347095 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d673a97-69bf-4125-9b73-d9b79c8f4ab0","Type":"ContainerDied","Data":"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5"} Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.347122 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d673a97-69bf-4125-9b73-d9b79c8f4ab0","Type":"ContainerDied","Data":"0a4ff52384e8850751241618d3641947704b37bcf530c5a90a42990f905f56cb"} Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.347137 4612 scope.go:117] "RemoveContainer" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.347238 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.350672 4612 generic.go:334] "Generic (PLEG): container finished" podID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerID="93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee" exitCode=0 Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.350712 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerDied","Data":"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee"} Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.350737 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47ca23ae-a3cf-4fbb-b2d8-41d188321af5","Type":"ContainerDied","Data":"6918b07fab7677dc5fdf2fcceaab4bf592b272c6bb57899e0d846913564237dc"} Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.350789 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.361749 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.361768 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.361777 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.361787 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gcl2\" (UniqueName: \"kubernetes.io/projected/47ca23ae-a3cf-4fbb-b2d8-41d188321af5-kube-api-access-9gcl2\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.375215 4612 scope.go:117] "RemoveContainer" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" Dec 03 07:47:51 crc kubenswrapper[4612]: E1203 07:47:51.375647 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5\": container with ID starting with 9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5 not found: ID does not exist" containerID="9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.375698 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5"} err="failed to get container status \"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5\": rpc error: code = NotFound desc = could not find container \"9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5\": container with ID starting with 9bd713d71849872f1a4ad99c2184e5eb194fd1131a9bcff2710cc6749fd393c5 not found: ID does not exist" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.375717 4612 scope.go:117] "RemoveContainer" containerID="93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.380110 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.388353 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.448043 4612 scope.go:117] "RemoveContainer" containerID="a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.451268 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.466822 4612 scope.go:117] "RemoveContainer" containerID="93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee" Dec 03 07:47:51 crc kubenswrapper[4612]: E1203 07:47:51.469173 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee\": container with ID starting with 93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee not found: ID does not exist" containerID="93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.469207 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee"} err="failed to get container status \"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee\": rpc error: code = NotFound desc = could not find container \"93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee\": container with ID starting with 93f400f5cd4b6c2ffc90f6f7e7357dfac8a4fa5019c888ad8d4bb0a365296bee not found: ID does not exist" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.469230 4612 scope.go:117] "RemoveContainer" containerID="a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313" Dec 03 07:47:51 crc kubenswrapper[4612]: E1203 07:47:51.470000 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313\": container with ID starting with a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313 not found: ID does not exist" containerID="a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.470033 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313"} err="failed to get container status \"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313\": rpc error: code = NotFound desc = could not find container \"a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313\": container with ID starting with a0ee18004b3f80d2eed74448ba790a14b962c6c4dd7edfc82be4699133031313 not found: ID does not exist" Dec 03 07:47:51 crc kubenswrapper[4612]: I1203 07:47:51.471536 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.060796 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061171 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-log" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061185 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-log" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061206 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061212 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061225 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon-log" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061232 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" 
containerName="horizon-log" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061246 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" containerName="nova-manage" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061251 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" containerName="nova-manage" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061265 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-api" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061270 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-api" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061287 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerName="nova-scheduler-scheduler" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061293 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerName="nova-scheduler-scheduler" Dec 03 07:47:52 crc kubenswrapper[4612]: E1203 07:47:52.061304 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061310 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061461 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-log" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061476 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" containerName="nova-manage" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061484 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061493 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" containerName="nova-scheduler-scheduler" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061504 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" containerName="nova-api-api" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061511 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon-log" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.061518 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7462fb55-15b7-4416-b34f-23893766b5ed" containerName="horizon" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.062096 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.064852 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.074706 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.076692 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.086865 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.105260 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.116907 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177310 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177391 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7dmk\" (UniqueName: \"kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177574 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177618 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177635 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llldl\" (UniqueName: \"kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177834 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.177978 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.279770 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.279839 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7dmk\" (UniqueName: \"kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.279901 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.279936 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.279964 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llldl\" (UniqueName: \"kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.280004 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.280045 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.281205 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.285845 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.286107 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.286571 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.288054 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.301070 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llldl\" (UniqueName: \"kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl\") pod \"nova-scheduler-0\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.311614 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7dmk\" (UniqueName: \"kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk\") pod \"nova-api-0\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.383081 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.396715 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.582040 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.882428 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: I1203 07:47:52.956267 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:47:52 crc kubenswrapper[4612]: W1203 07:47:52.962880 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64a9499f_d0a5_4581_85df_ff00cf6f4dfd.slice/crio-48a3465e1b4c09d72e1cb7dd6f0e80eeadca056d7395660d348329f4e4ea25a3 WatchSource:0}: Error finding container 48a3465e1b4c09d72e1cb7dd6f0e80eeadca056d7395660d348329f4e4ea25a3: Status 404 returned error can't find the container with id 48a3465e1b4c09d72e1cb7dd6f0e80eeadca056d7395660d348329f4e4ea25a3 Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.102686 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d673a97-69bf-4125-9b73-d9b79c8f4ab0" path="/var/lib/kubelet/pods/0d673a97-69bf-4125-9b73-d9b79c8f4ab0/volumes" Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.103772 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ca23ae-a3cf-4fbb-b2d8-41d188321af5" path="/var/lib/kubelet/pods/47ca23ae-a3cf-4fbb-b2d8-41d188321af5/volumes" Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.398877 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"64a9499f-d0a5-4581-85df-ff00cf6f4dfd","Type":"ContainerStarted","Data":"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81"} Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.398917 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"64a9499f-d0a5-4581-85df-ff00cf6f4dfd","Type":"ContainerStarted","Data":"48a3465e1b4c09d72e1cb7dd6f0e80eeadca056d7395660d348329f4e4ea25a3"} Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.401413 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerStarted","Data":"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502"} Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.401452 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerStarted","Data":"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7"} Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.401464 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerStarted","Data":"8268e2497c383c88699ce9cbd4b4a8231840ebb203833f1c458cb500888a2b76"} Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.437938 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.437914371 podStartE2EDuration="1.437914371s" podCreationTimestamp="2025-12-03 07:47:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:53.425447703 +0000 UTC m=+1236.598805133" 
watchObservedRunningTime="2025-12-03 07:47:53.437914371 +0000 UTC m=+1236.611271771" Dec 03 07:47:53 crc kubenswrapper[4612]: I1203 07:47:53.456858 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.456838629 podStartE2EDuration="1.456838629s" podCreationTimestamp="2025-12-03 07:47:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:47:53.441339036 +0000 UTC m=+1236.614696436" watchObservedRunningTime="2025-12-03 07:47:53.456838629 +0000 UTC m=+1236.630196039" Dec 03 07:47:55 crc kubenswrapper[4612]: I1203 07:47:55.664694 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 07:47:56 crc kubenswrapper[4612]: I1203 07:47:56.764351 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:56 crc kubenswrapper[4612]: I1203 07:47:56.764576 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f305106f-eafb-456f-b958-3895a14ec520" containerName="kube-state-metrics" containerID="cri-o://0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5" gracePeriod=30 Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.260188 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.384159 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.413043 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6qb4\" (UniqueName: \"kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4\") pod \"f305106f-eafb-456f-b958-3895a14ec520\" (UID: \"f305106f-eafb-456f-b958-3895a14ec520\") " Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.419677 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4" (OuterVolumeSpecName: "kube-api-access-s6qb4") pod "f305106f-eafb-456f-b958-3895a14ec520" (UID: "f305106f-eafb-456f-b958-3895a14ec520"). InnerVolumeSpecName "kube-api-access-s6qb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.441769 4612 generic.go:334] "Generic (PLEG): container finished" podID="f305106f-eafb-456f-b958-3895a14ec520" containerID="0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5" exitCode=2 Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.442059 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f305106f-eafb-456f-b958-3895a14ec520","Type":"ContainerDied","Data":"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5"} Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.443110 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f305106f-eafb-456f-b958-3895a14ec520","Type":"ContainerDied","Data":"46b23469281868f2f20254be28918002e53739bca381ffee50452e2446251dc1"} Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.442082 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.443215 4612 scope.go:117] "RemoveContainer" containerID="0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.496859 4612 scope.go:117] "RemoveContainer" containerID="0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5" Dec 03 07:47:57 crc kubenswrapper[4612]: E1203 07:47:57.497356 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5\": container with ID starting with 0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5 not found: ID does not exist" containerID="0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.497391 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5"} err="failed to get container status \"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5\": rpc error: code = NotFound desc = could not find container \"0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5\": container with ID starting with 0280b0747f0310cfcb70949b44d66648897d495b3953cb0f74f3bd6f8a1038b5 not found: ID does not exist" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.499565 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.515070 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.515160 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6qb4\" (UniqueName: \"kubernetes.io/projected/f305106f-eafb-456f-b958-3895a14ec520-kube-api-access-s6qb4\") on node \"crc\" DevicePath \"\"" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.525097 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:57 crc kubenswrapper[4612]: E1203 07:47:57.525538 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f305106f-eafb-456f-b958-3895a14ec520" containerName="kube-state-metrics" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.525555 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f305106f-eafb-456f-b958-3895a14ec520" containerName="kube-state-metrics" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.525715 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f305106f-eafb-456f-b958-3895a14ec520" containerName="kube-state-metrics" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.526393 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.528181 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.528295 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.545453 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.616965 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.617098 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.617138 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jllrj\" (UniqueName: \"kubernetes.io/projected/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-api-access-jllrj\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.617194 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.719279 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.719406 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.719508 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.719551 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jllrj\" 
(UniqueName: \"kubernetes.io/projected/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-api-access-jllrj\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.724252 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.726557 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.727727 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.739381 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jllrj\" (UniqueName: \"kubernetes.io/projected/67686a82-cd3d-4b98-ab0f-b2e37c74a12f-kube-api-access-jllrj\") pod \"kube-state-metrics-0\" (UID: \"67686a82-cd3d-4b98-ab0f-b2e37c74a12f\") " pod="openstack/kube-state-metrics-0" Dec 03 07:47:57 crc kubenswrapper[4612]: I1203 07:47:57.845264 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.293106 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.453543 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"67686a82-cd3d-4b98-ab0f-b2e37c74a12f","Type":"ContainerStarted","Data":"c1dce8dcff434914a6be175edae927e8d3cbb966fdb98deddd66dc21db982e15"} Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.679987 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.681042 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-central-agent" containerID="cri-o://342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967" gracePeriod=30 Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.681667 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="proxy-httpd" containerID="cri-o://10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816" gracePeriod=30 Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.681822 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="sg-core" containerID="cri-o://bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a" gracePeriod=30 Dec 03 07:47:58 crc kubenswrapper[4612]: I1203 07:47:58.681967 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-notification-agent" containerID="cri-o://60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d" gracePeriod=30 Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.106269 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f305106f-eafb-456f-b958-3895a14ec520" path="/var/lib/kubelet/pods/f305106f-eafb-456f-b958-3895a14ec520/volumes" Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.464310 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"67686a82-cd3d-4b98-ab0f-b2e37c74a12f","Type":"ContainerStarted","Data":"260175b0b35c95bf48d3ae9e32b51f4f4cb0657612bacd3dba2a3a846a6ecf5e"} Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.464677 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.466908 4612 generic.go:334] "Generic (PLEG): container finished" podID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerID="10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816" exitCode=0 Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.466977 4612 generic.go:334] "Generic (PLEG): container finished" podID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerID="bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a" exitCode=2 Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.466991 4612 generic.go:334] "Generic (PLEG): container finished" podID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerID="342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967" 
exitCode=0 Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.466992 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerDied","Data":"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816"} Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.467040 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerDied","Data":"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a"} Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.467057 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerDied","Data":"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967"} Dec 03 07:47:59 crc kubenswrapper[4612]: I1203 07:47:59.483570 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.050386391 podStartE2EDuration="2.483545782s" podCreationTimestamp="2025-12-03 07:47:57 +0000 UTC" firstStartedPulling="2025-12-03 07:47:58.294733613 +0000 UTC m=+1241.468091013" lastFinishedPulling="2025-12-03 07:47:58.727892994 +0000 UTC m=+1241.901250404" observedRunningTime="2025-12-03 07:47:59.477673537 +0000 UTC m=+1242.651030937" watchObservedRunningTime="2025-12-03 07:47:59.483545782 +0000 UTC m=+1242.656903202" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.383994 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.398218 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.398252 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.417860 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.535582 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 07:48:02 crc kubenswrapper[4612]: I1203 07:48:02.941748 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.058834 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lm6b\" (UniqueName: \"kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059082 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059230 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059365 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059394 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059677 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.059849 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd\") pod \"e3640d30-85b5-4b87-8860-ca61e792f7f7\" (UID: \"e3640d30-85b5-4b87-8860-ca61e792f7f7\") " Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.061087 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.066139 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.075743 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b" (OuterVolumeSpecName: "kube-api-access-8lm6b") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "kube-api-access-8lm6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.104321 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts" (OuterVolumeSpecName: "scripts") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.162960 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lm6b\" (UniqueName: \"kubernetes.io/projected/e3640d30-85b5-4b87-8860-ca61e792f7f7-kube-api-access-8lm6b\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.163236 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.163350 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.163423 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3640d30-85b5-4b87-8860-ca61e792f7f7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.200994 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.233285 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data" (OuterVolumeSpecName: "config-data") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.250056 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3640d30-85b5-4b87-8860-ca61e792f7f7" (UID: "e3640d30-85b5-4b87-8860-ca61e792f7f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.265811 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.266722 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.266741 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3640d30-85b5-4b87-8860-ca61e792f7f7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.480189 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.480195 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.509126 4612 generic.go:334] "Generic (PLEG): container finished" podID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerID="60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d" exitCode=0 Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.509183 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerDied","Data":"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d"} Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.509236 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e3640d30-85b5-4b87-8860-ca61e792f7f7","Type":"ContainerDied","Data":"70cf633d885a11a761cf80b91ef6a1be9d17be4c66ea4b3aac262844253a9f83"} Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.509254 4612 scope.go:117] "RemoveContainer" containerID="10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.509613 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.564748 4612 scope.go:117] "RemoveContainer" containerID="bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.568748 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.578240 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.588269 4612 scope.go:117] "RemoveContainer" containerID="60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.607648 4612 scope.go:117] "RemoveContainer" containerID="342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612337 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.612683 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-notification-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612700 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-notification-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.612717 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="sg-core" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612724 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="sg-core" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.612746 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-central-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612751 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-central-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.612762 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="proxy-httpd" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612767 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="proxy-httpd" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.612926 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="proxy-httpd" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.618231 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="sg-core" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.618258 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-notification-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.618288 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" containerName="ceilometer-central-agent" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.620489 4612 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.622710 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.627822 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.631082 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.631462 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.640272 4612 scope.go:117] "RemoveContainer" containerID="10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.658457 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816\": container with ID starting with 10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816 not found: ID does not exist" containerID="10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.658757 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816"} err="failed to get container status \"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816\": rpc error: code = NotFound desc = could not find container \"10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816\": container with ID starting with 10a80cc13e56066297ff05b4ed913032da852fd0b364a56133a3c01308364816 not found: ID does not exist" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.658878 4612 scope.go:117] "RemoveContainer" containerID="bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.660676 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a\": container with ID starting with bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a not found: ID does not exist" containerID="bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.660736 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a"} err="failed to get container status \"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a\": rpc error: code = NotFound desc = could not find container \"bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a\": container with ID starting with bacf26bb814efdd07b2f119882a59f1df7acbbadc12192a18ac752db5f98a59a not found: ID does not exist" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.660778 4612 scope.go:117] "RemoveContainer" containerID="60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.661085 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d\": container with ID starting with 60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d not found: ID does not exist" containerID="60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.661128 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d"} err="failed to get container status \"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d\": rpc error: code = NotFound desc = could not find container \"60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d\": container with ID starting with 60adffd544564dbd18f673d1700798d68d91fbcf2898a77e2b54e0f4a11e161d not found: ID does not exist" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.661143 4612 scope.go:117] "RemoveContainer" containerID="342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967" Dec 03 07:48:03 crc kubenswrapper[4612]: E1203 07:48:03.661369 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967\": container with ID starting with 342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967 not found: ID does not exist" containerID="342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.661389 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967"} err="failed to get container status \"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967\": rpc error: code = NotFound desc = could not find container \"342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967\": container with ID starting with 342a36c885660e93446eeba25c3e71e86529e69e047e1725b0501b4147156967 not found: ID does not exist" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674384 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674432 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674494 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674520 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdqnp\" (UniqueName: \"kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp\") pod \"ceilometer-0\" (UID: 
\"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674542 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674599 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674695 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.674757 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.776747 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.776846 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.776891 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.776936 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.777031 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.777067 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdqnp\" (UniqueName: 
\"kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.777096 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.777121 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.777912 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.778987 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.781396 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.781836 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.781993 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.782304 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.787479 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.801103 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdqnp\" (UniqueName: 
\"kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp\") pod \"ceilometer-0\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " pod="openstack/ceilometer-0" Dec 03 07:48:03 crc kubenswrapper[4612]: I1203 07:48:03.935996 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:04 crc kubenswrapper[4612]: I1203 07:48:04.441349 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:04 crc kubenswrapper[4612]: W1203 07:48:04.441642 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ae5e107_dd15_438e_9d41_6ed9c0c46b26.slice/crio-5c222c586f95104a0f5749f5ff8d203ed37e755da1925b8b9174e3676b35b683 WatchSource:0}: Error finding container 5c222c586f95104a0f5749f5ff8d203ed37e755da1925b8b9174e3676b35b683: Status 404 returned error can't find the container with id 5c222c586f95104a0f5749f5ff8d203ed37e755da1925b8b9174e3676b35b683 Dec 03 07:48:04 crc kubenswrapper[4612]: I1203 07:48:04.519981 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerStarted","Data":"5c222c586f95104a0f5749f5ff8d203ed37e755da1925b8b9174e3676b35b683"} Dec 03 07:48:05 crc kubenswrapper[4612]: I1203 07:48:05.107661 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3640d30-85b5-4b87-8860-ca61e792f7f7" path="/var/lib/kubelet/pods/e3640d30-85b5-4b87-8860-ca61e792f7f7/volumes" Dec 03 07:48:05 crc kubenswrapper[4612]: I1203 07:48:05.530972 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerStarted","Data":"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61"} Dec 03 07:48:06 crc kubenswrapper[4612]: I1203 07:48:06.624208 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerStarted","Data":"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f"} Dec 03 07:48:06 crc kubenswrapper[4612]: I1203 07:48:06.769081 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:48:07 crc kubenswrapper[4612]: I1203 07:48:07.658308 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerStarted","Data":"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535"} Dec 03 07:48:07 crc kubenswrapper[4612]: I1203 07:48:07.856281 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 07:48:08 crc kubenswrapper[4612]: I1203 07:48:08.673581 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerStarted","Data":"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e"} Dec 03 07:48:08 crc kubenswrapper[4612]: I1203 07:48:08.674226 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:48:08 crc kubenswrapper[4612]: I1203 07:48:08.718157 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.271000039 podStartE2EDuration="5.718133748s" podCreationTimestamp="2025-12-03 
07:48:03 +0000 UTC" firstStartedPulling="2025-12-03 07:48:04.444074999 +0000 UTC m=+1247.617432399" lastFinishedPulling="2025-12-03 07:48:07.891208718 +0000 UTC m=+1251.064566108" observedRunningTime="2025-12-03 07:48:08.699088787 +0000 UTC m=+1251.872446267" watchObservedRunningTime="2025-12-03 07:48:08.718133748 +0000 UTC m=+1251.891491178" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.630378 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.633739 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.685572 4612 generic.go:334] "Generic (PLEG): container finished" podID="f876fbab-9c17-44f0-b708-27d573e171e8" containerID="5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641" exitCode=137 Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.685629 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f876fbab-9c17-44f0-b708-27d573e171e8","Type":"ContainerDied","Data":"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641"} Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.685656 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f876fbab-9c17-44f0-b708-27d573e171e8","Type":"ContainerDied","Data":"d4486605f41aeec35170d01b01b11be7efc4147eb984c69b43b267eea7974232"} Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.685672 4612 scope.go:117] "RemoveContainer" containerID="5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.685784 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.689180 4612 generic.go:334] "Generic (PLEG): container finished" podID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerID="35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74" exitCode=137 Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.689665 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.690064 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerDied","Data":"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74"} Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.690164 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d96560d-e8ba-436b-b4a9-f157564a0022","Type":"ContainerDied","Data":"14f6b676a10ba9a5bf2115faf9fa304902df9d185cbaaa206ddebee12d95df84"} Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.706710 4612 scope.go:117] "RemoveContainer" containerID="5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641" Dec 03 07:48:09 crc kubenswrapper[4612]: E1203 07:48:09.707115 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641\": container with ID starting with 5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641 not found: ID does not exist" containerID="5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.707147 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641"} err="failed to get container status \"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641\": rpc error: code = NotFound desc = could not find container \"5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641\": container with ID starting with 5c63a8f3b4bc08ea4bf19d72dc1b730f487fb5646a44e512450e971f2f435641 not found: ID does not exist" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.707168 4612 scope.go:117] "RemoveContainer" containerID="35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.726420 4612 scope.go:117] "RemoveContainer" containerID="6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.743079 4612 scope.go:117] "RemoveContainer" containerID="35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74" Dec 03 07:48:09 crc kubenswrapper[4612]: E1203 07:48:09.743544 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74\": container with ID starting with 35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74 not found: ID does not exist" containerID="35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.743611 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74"} err="failed to get container status \"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74\": rpc error: code = NotFound desc = could not find container \"35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74\": container with ID starting with 35832863b91872f08668a71898a86aa1ca77e18a982e168ff9c1649a0d727e74 not found: ID does not exist" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.743642 4612 
scope.go:117] "RemoveContainer" containerID="6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840" Dec 03 07:48:09 crc kubenswrapper[4612]: E1203 07:48:09.744153 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840\": container with ID starting with 6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840 not found: ID does not exist" containerID="6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.744186 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840"} err="failed to get container status \"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840\": rpc error: code = NotFound desc = could not find container \"6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840\": container with ID starting with 6bf7732d10a5b2b08dcca230c5417ed0dce12b823ab83890c76288b2dabe1840 not found: ID does not exist" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.790931 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6dqn\" (UniqueName: \"kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn\") pod \"f876fbab-9c17-44f0-b708-27d573e171e8\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.790994 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data\") pod \"1d96560d-e8ba-436b-b4a9-f157564a0022\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.791059 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr6l6\" (UniqueName: \"kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6\") pod \"1d96560d-e8ba-436b-b4a9-f157564a0022\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.791091 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle\") pod \"1d96560d-e8ba-436b-b4a9-f157564a0022\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.791126 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data\") pod \"f876fbab-9c17-44f0-b708-27d573e171e8\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.791144 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs\") pod \"1d96560d-e8ba-436b-b4a9-f157564a0022\" (UID: \"1d96560d-e8ba-436b-b4a9-f157564a0022\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.791263 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle\") 
pod \"f876fbab-9c17-44f0-b708-27d573e171e8\" (UID: \"f876fbab-9c17-44f0-b708-27d573e171e8\") " Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.792600 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs" (OuterVolumeSpecName: "logs") pod "1d96560d-e8ba-436b-b4a9-f157564a0022" (UID: "1d96560d-e8ba-436b-b4a9-f157564a0022"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.796184 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6" (OuterVolumeSpecName: "kube-api-access-zr6l6") pod "1d96560d-e8ba-436b-b4a9-f157564a0022" (UID: "1d96560d-e8ba-436b-b4a9-f157564a0022"). InnerVolumeSpecName "kube-api-access-zr6l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.796256 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn" (OuterVolumeSpecName: "kube-api-access-c6dqn") pod "f876fbab-9c17-44f0-b708-27d573e171e8" (UID: "f876fbab-9c17-44f0-b708-27d573e171e8"). InnerVolumeSpecName "kube-api-access-c6dqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.816961 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data" (OuterVolumeSpecName: "config-data") pod "1d96560d-e8ba-436b-b4a9-f157564a0022" (UID: "1d96560d-e8ba-436b-b4a9-f157564a0022"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.817556 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f876fbab-9c17-44f0-b708-27d573e171e8" (UID: "f876fbab-9c17-44f0-b708-27d573e171e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.817828 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data" (OuterVolumeSpecName: "config-data") pod "f876fbab-9c17-44f0-b708-27d573e171e8" (UID: "f876fbab-9c17-44f0-b708-27d573e171e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.819509 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d96560d-e8ba-436b-b4a9-f157564a0022" (UID: "1d96560d-e8ba-436b-b4a9-f157564a0022"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893126 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893161 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6dqn\" (UniqueName: \"kubernetes.io/projected/f876fbab-9c17-44f0-b708-27d573e171e8-kube-api-access-c6dqn\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893170 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893179 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr6l6\" (UniqueName: \"kubernetes.io/projected/1d96560d-e8ba-436b-b4a9-f157564a0022-kube-api-access-zr6l6\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893189 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d96560d-e8ba-436b-b4a9-f157564a0022-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893200 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f876fbab-9c17-44f0-b708-27d573e171e8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:09 crc kubenswrapper[4612]: I1203 07:48:09.893213 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d96560d-e8ba-436b-b4a9-f157564a0022-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.031692 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.046655 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.067001 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.090672 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.109654 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: E1203 07:48:10.110626 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f876fbab-9c17-44f0-b708-27d573e171e8" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110647 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f876fbab-9c17-44f0-b708-27d573e171e8" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 07:48:10 crc kubenswrapper[4612]: E1203 07:48:10.110658 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-metadata" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110666 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-metadata" Dec 03 
07:48:10 crc kubenswrapper[4612]: E1203 07:48:10.110688 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-log" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110694 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-log" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110861 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-metadata" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110873 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" containerName="nova-metadata-log" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.110894 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f876fbab-9c17-44f0-b708-27d573e171e8" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.111480 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.114576 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.114815 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.120253 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.120370 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.142653 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.144488 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.147418 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.147616 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.147983 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.203081 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.203174 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.203305 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.203445 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6hpq\" (UniqueName: \"kubernetes.io/projected/513ba267-34e9-4883-9eb1-55f5441813db-kube-api-access-r6hpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.203713 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305030 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305094 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305123 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgdj6\" (UniqueName: 
\"kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305168 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305205 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305231 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305266 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6hpq\" (UniqueName: \"kubernetes.io/projected/513ba267-34e9-4883-9eb1-55f5441813db-kube-api-access-r6hpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305295 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305327 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.305354 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.309037 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.309587 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.309605 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.316047 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/513ba267-34e9-4883-9eb1-55f5441813db-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.320707 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6hpq\" (UniqueName: \"kubernetes.io/projected/513ba267-34e9-4883-9eb1-55f5441813db-kube-api-access-r6hpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"513ba267-34e9-4883-9eb1-55f5441813db\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.406445 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.406495 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgdj6\" (UniqueName: \"kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.406565 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.406603 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.406634 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.407688 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.411286 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.414891 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.416514 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.430514 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgdj6\" (UniqueName: \"kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6\") pod \"nova-metadata-0\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.442632 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.469451 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:10 crc kubenswrapper[4612]: W1203 07:48:10.891103 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod513ba267_34e9_4883_9eb1_55f5441813db.slice/crio-ed52aaad1de6c575d552281ff8b9ab8aefdd4126bb2adb2198f2e07560aa454c WatchSource:0}: Error finding container ed52aaad1de6c575d552281ff8b9ab8aefdd4126bb2adb2198f2e07560aa454c: Status 404 returned error can't find the container with id ed52aaad1de6c575d552281ff8b9ab8aefdd4126bb2adb2198f2e07560aa454c Dec 03 07:48:10 crc kubenswrapper[4612]: I1203 07:48:10.891414 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.066619 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.103239 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d96560d-e8ba-436b-b4a9-f157564a0022" path="/var/lib/kubelet/pods/1d96560d-e8ba-436b-b4a9-f157564a0022/volumes" Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.104026 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f876fbab-9c17-44f0-b708-27d573e171e8" path="/var/lib/kubelet/pods/f876fbab-9c17-44f0-b708-27d573e171e8/volumes" Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.766182 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"513ba267-34e9-4883-9eb1-55f5441813db","Type":"ContainerStarted","Data":"a556fc7ad7e693eeb94d57d9c357051d3eccd84ffbeaba06166ee2c7b49af3fa"} Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.766734 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"513ba267-34e9-4883-9eb1-55f5441813db","Type":"ContainerStarted","Data":"ed52aaad1de6c575d552281ff8b9ab8aefdd4126bb2adb2198f2e07560aa454c"} Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.768913 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerStarted","Data":"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c"} Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.769006 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerStarted","Data":"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8"} Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.769023 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerStarted","Data":"0ee97b3664277a0dc7502eabcdef8dabea0f3dca346830d453f572af65b89fc7"} Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.790741 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.7907197350000001 podStartE2EDuration="1.790719735s" podCreationTimestamp="2025-12-03 07:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:11.787082405 +0000 UTC m=+1254.960439825" watchObservedRunningTime="2025-12-03 07:48:11.790719735 +0000 UTC m=+1254.964077135" Dec 03 07:48:11 crc kubenswrapper[4612]: I1203 07:48:11.816398 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.8163748690000001 podStartE2EDuration="1.816374869s" podCreationTimestamp="2025-12-03 07:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:11.809833677 +0000 UTC m=+1254.983191077" watchObservedRunningTime="2025-12-03 07:48:11.816374869 +0000 UTC m=+1254.989732289" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.408585 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.408973 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.410730 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.414481 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.786236 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 07:48:12 crc kubenswrapper[4612]: I1203 07:48:12.791562 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.132125 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"] Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.133621 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.148489 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"] Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.273816 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.273979 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.274014 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.274096 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.274146 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl5n9\" (UniqueName: \"kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.274163 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.375868 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.375924 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.375986 4612 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.376029 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl5n9\" (UniqueName: \"kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.376047 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.376073 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.376882 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.377212 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.377474 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.377889 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.378466 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.418772 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl5n9\" (UniqueName: 
\"kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9\") pod \"dnsmasq-dns-5c7b6c5df9-llz5b\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.457015 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:13 crc kubenswrapper[4612]: I1203 07:48:13.959858 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"] Dec 03 07:48:14 crc kubenswrapper[4612]: I1203 07:48:14.800556 4612 generic.go:334] "Generic (PLEG): container finished" podID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerID="924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea" exitCode=0 Dec 03 07:48:14 crc kubenswrapper[4612]: I1203 07:48:14.800646 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" event={"ID":"6e2cf39c-6b71-49e5-b776-708ca1bf19f7","Type":"ContainerDied","Data":"924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea"} Dec 03 07:48:14 crc kubenswrapper[4612]: I1203 07:48:14.800866 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" event={"ID":"6e2cf39c-6b71-49e5-b776-708ca1bf19f7","Type":"ContainerStarted","Data":"16d143c143850045c9d3a6361b4c569c74e5f7d0168570056f49dc519c7fa3e9"} Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.443592 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.470049 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.470104 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.809499 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" event={"ID":"6e2cf39c-6b71-49e5-b776-708ca1bf19f7","Type":"ContainerStarted","Data":"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"} Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.810110 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.831594 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" podStartSLOduration=2.8315787180000003 podStartE2EDuration="2.831578718s" podCreationTimestamp="2025-12-03 07:48:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:15.823740174 +0000 UTC m=+1258.997097574" watchObservedRunningTime="2025-12-03 07:48:15.831578718 +0000 UTC m=+1259.004936128" Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.867132 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 07:48:15.867332 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-log" containerID="cri-o://d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7" gracePeriod=30 Dec 03 07:48:15 crc kubenswrapper[4612]: I1203 
07:48:15.867425 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-api" containerID="cri-o://1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502" gracePeriod=30 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.541802 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.542148 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-central-agent" containerID="cri-o://e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61" gracePeriod=30 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.542204 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-notification-agent" containerID="cri-o://26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f" gracePeriod=30 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.542218 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="sg-core" containerID="cri-o://c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535" gracePeriod=30 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.542247 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="proxy-httpd" containerID="cri-o://5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e" gracePeriod=30 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.820543 4612 generic.go:334] "Generic (PLEG): container finished" podID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerID="d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7" exitCode=143 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.820632 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerDied","Data":"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7"} Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.824363 4612 generic.go:334] "Generic (PLEG): container finished" podID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerID="5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e" exitCode=0 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.824407 4612 generic.go:334] "Generic (PLEG): container finished" podID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerID="c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535" exitCode=2 Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.824414 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerDied","Data":"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e"} Dec 03 07:48:16 crc kubenswrapper[4612]: I1203 07:48:16.824461 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerDied","Data":"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535"} Dec 03 07:48:17 crc kubenswrapper[4612]: I1203 
07:48:17.852972 4612 generic.go:334] "Generic (PLEG): container finished" podID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerID="e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61" exitCode=0 Dec 03 07:48:17 crc kubenswrapper[4612]: I1203 07:48:17.853134 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerDied","Data":"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61"} Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.588304 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.596890 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697210 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs\") pod \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697322 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697369 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697412 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697465 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697499 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697527 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data\") pod \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697561 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle\") 
pod \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697623 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7dmk\" (UniqueName: \"kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk\") pod \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\" (UID: \"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697652 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697695 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.697733 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdqnp\" (UniqueName: \"kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp\") pod \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\" (UID: \"1ae5e107-dd15-438e-9d41-6ed9c0c46b26\") " Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.703273 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs" (OuterVolumeSpecName: "logs") pod "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" (UID: "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.704545 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp" (OuterVolumeSpecName: "kube-api-access-cdqnp") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "kube-api-access-cdqnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.704923 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.709819 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk" (OuterVolumeSpecName: "kube-api-access-g7dmk") pod "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" (UID: "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d"). InnerVolumeSpecName "kube-api-access-g7dmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.710236 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.716430 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts" (OuterVolumeSpecName: "scripts") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.754128 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data" (OuterVolumeSpecName: "config-data") pod "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" (UID: "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.771060 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.779692 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" (UID: "b9e8f8ed-d6d8-49c9-855d-da71de9bd12d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801366 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801676 4612 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801687 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801698 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801709 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801719 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7dmk\" (UniqueName: \"kubernetes.io/projected/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d-kube-api-access-g7dmk\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801728 4612 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801739 4612 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.801749 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdqnp\" (UniqueName: \"kubernetes.io/projected/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-kube-api-access-cdqnp\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.821236 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.836542 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.860652 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data" (OuterVolumeSpecName: "config-data") pod "1ae5e107-dd15-438e-9d41-6ed9c0c46b26" (UID: "1ae5e107-dd15-438e-9d41-6ed9c0c46b26"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.871884 4612 generic.go:334] "Generic (PLEG): container finished" podID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerID="1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502" exitCode=0 Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.872006 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerDied","Data":"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502"} Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.872058 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b9e8f8ed-d6d8-49c9-855d-da71de9bd12d","Type":"ContainerDied","Data":"8268e2497c383c88699ce9cbd4b4a8231840ebb203833f1c458cb500888a2b76"} Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.872079 4612 scope.go:117] "RemoveContainer" containerID="1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.872574 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.875679 4612 generic.go:334] "Generic (PLEG): container finished" podID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerID="26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f" exitCode=0 Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.875713 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerDied","Data":"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f"} Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.875734 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ae5e107-dd15-438e-9d41-6ed9c0c46b26","Type":"ContainerDied","Data":"5c222c586f95104a0f5749f5ff8d203ed37e755da1925b8b9174e3676b35b683"} Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.875796 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.909183 4612 scope.go:117] "RemoveContainer" containerID="d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.910750 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.910776 4612 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.910785 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5e107-dd15-438e-9d41-6ed9c0c46b26-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.917817 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.929975 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.941378 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.941813 4612 scope.go:117] "RemoveContainer" containerID="1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.942837 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502\": container with ID starting with 1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502 not found: ID does not exist" containerID="1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.942870 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502"} err="failed to get container status \"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502\": rpc error: code = NotFound desc = could not find container \"1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502\": container with ID starting with 1309e538bf496d0fff03c1be16118714e7169817a7aacfc13150e6c72731d502 not found: ID does not exist" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.942892 4612 scope.go:117] "RemoveContainer" containerID="d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.943308 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7\": container with ID starting with d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7 not found: ID does not exist" containerID="d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.943328 4612 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7"} err="failed to get container status \"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7\": rpc error: code = NotFound desc = could not find container \"d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7\": container with ID starting with d8cf9da7e19b59620353c5f325172c4ddac8be7ae0d5a5952363b0bf5078c7a7 not found: ID does not exist" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.943350 4612 scope.go:117] "RemoveContainer" containerID="5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.967997 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.979861 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980451 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="sg-core" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980474 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="sg-core" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980491 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-notification-agent" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980500 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-notification-agent" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980519 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="proxy-httpd" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980528 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="proxy-httpd" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980546 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-central-agent" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980554 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-central-agent" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980566 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-api" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980573 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-api" Dec 03 07:48:19 crc kubenswrapper[4612]: E1203 07:48:19.980594 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-log" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980602 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-log" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980813 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-central-agent" Dec 03 07:48:19 
crc kubenswrapper[4612]: I1203 07:48:19.980834 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="sg-core" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980852 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-api" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980864 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="proxy-httpd" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980893 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" containerName="nova-api-log" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.980907 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" containerName="ceilometer-notification-agent" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.982223 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:19 crc kubenswrapper[4612]: I1203 07:48:19.992841 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.005546 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.005735 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.005832 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.006403 4612 scope.go:117] "RemoveContainer" containerID="c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.021884 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.024072 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.030507 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.030975 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.031325 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.041138 4612 scope.go:117] "RemoveContainer" containerID="26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.043351 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.095056 4612 scope.go:117] "RemoveContainer" containerID="e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116316 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116378 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116406 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116432 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116482 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65hvl\" (UniqueName: \"kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116512 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116533 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-log-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116593 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-scripts\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116616 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116638 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-run-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116663 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116685 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8pdt\" (UniqueName: \"kubernetes.io/projected/3867ea54-d402-4641-936e-9038ce646012-kube-api-access-h8pdt\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116711 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.116743 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-config-data\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.180270 4612 scope.go:117] "RemoveContainer" containerID="5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e" Dec 03 07:48:20 crc kubenswrapper[4612]: E1203 07:48:20.180724 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e\": container with ID starting with 5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e not found: ID does not exist" containerID="5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.180770 4612 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e"} err="failed to get container status \"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e\": rpc error: code = NotFound desc = could not find container \"5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e\": container with ID starting with 5e20d15d3cc5a0d6c80d544901ea88c2963df2ec1eaec5abe5fb2991d0a9931e not found: ID does not exist" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.180790 4612 scope.go:117] "RemoveContainer" containerID="c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535" Dec 03 07:48:20 crc kubenswrapper[4612]: E1203 07:48:20.181274 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535\": container with ID starting with c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535 not found: ID does not exist" containerID="c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.181313 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535"} err="failed to get container status \"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535\": rpc error: code = NotFound desc = could not find container \"c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535\": container with ID starting with c01e614bbbb948346279da520ee18afda134c0f55c3e9f424a70b14275c1d535 not found: ID does not exist" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.181338 4612 scope.go:117] "RemoveContainer" containerID="26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f" Dec 03 07:48:20 crc kubenswrapper[4612]: E1203 07:48:20.181666 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f\": container with ID starting with 26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f not found: ID does not exist" containerID="26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.181691 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f"} err="failed to get container status \"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f\": rpc error: code = NotFound desc = could not find container \"26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f\": container with ID starting with 26417732c7ec6f5213d56223fea20a7200091f05a67b4f504f95c01b7a9b2e8f not found: ID does not exist" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.181707 4612 scope.go:117] "RemoveContainer" containerID="e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61" Dec 03 07:48:20 crc kubenswrapper[4612]: E1203 07:48:20.181972 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61\": container with ID starting with e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61 not found: ID does not exist" 
containerID="e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.181993 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61"} err="failed to get container status \"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61\": rpc error: code = NotFound desc = could not find container \"e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61\": container with ID starting with e7b414fcf8cbce2dc9cffa2ca36674ed1d15f1a3b71c95d0ac4383e2f8277c61 not found: ID does not exist" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217752 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217798 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217826 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217869 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65hvl\" (UniqueName: \"kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217896 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217914 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-log-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217974 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-scripts\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.217992 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 
07:48:20.218010 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-run-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218028 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218047 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8pdt\" (UniqueName: \"kubernetes.io/projected/3867ea54-d402-4641-936e-9038ce646012-kube-api-access-h8pdt\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218069 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218094 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-config-data\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218121 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218262 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.218555 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-run-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.219069 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3867ea54-d402-4641-936e-9038ce646012-log-httpd\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.222468 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.224040 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.224546 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.225271 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.226451 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.227014 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-scripts\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.227103 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.227569 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.229233 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3867ea54-d402-4641-936e-9038ce646012-config-data\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.246594 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65hvl\" (UniqueName: \"kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl\") pod \"nova-api-0\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.246606 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8pdt\" (UniqueName: \"kubernetes.io/projected/3867ea54-d402-4641-936e-9038ce646012-kube-api-access-h8pdt\") pod \"ceilometer-0\" (UID: \"3867ea54-d402-4641-936e-9038ce646012\") " pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.322339 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.364510 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.443688 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.461153 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.471365 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.471408 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 07:48:20 crc kubenswrapper[4612]: W1203 07:48:20.828685 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12ad6f8a_1d13_4e56_93c6_27fb7b0cdbbd.slice/crio-c03fe91d6e79578e4e699d73aec0d1dbc3dbc95d29042a9a033c3714ce89b65e WatchSource:0}: Error finding container c03fe91d6e79578e4e699d73aec0d1dbc3dbc95d29042a9a033c3714ce89b65e: Status 404 returned error can't find the container with id c03fe91d6e79578e4e699d73aec0d1dbc3dbc95d29042a9a033c3714ce89b65e Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.830800 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.886780 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerStarted","Data":"c03fe91d6e79578e4e699d73aec0d1dbc3dbc95d29042a9a033c3714ce89b65e"} Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.919721 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 07:48:20 crc kubenswrapper[4612]: W1203 07:48:20.953285 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3867ea54_d402_4641_936e_9038ce646012.slice/crio-0d70d0c17413e62105069773b76b76cca2642eaaa8fd8e1010c62576d37ac622 WatchSource:0}: Error finding container 0d70d0c17413e62105069773b76b76cca2642eaaa8fd8e1010c62576d37ac622: Status 404 returned error can't find the container with id 0d70d0c17413e62105069773b76b76cca2642eaaa8fd8e1010c62576d37ac622 Dec 03 07:48:20 crc kubenswrapper[4612]: I1203 07:48:20.969318 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.121072 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ae5e107-dd15-438e-9d41-6ed9c0c46b26" path="/var/lib/kubelet/pods/1ae5e107-dd15-438e-9d41-6ed9c0c46b26/volumes" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.122330 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9e8f8ed-d6d8-49c9-855d-da71de9bd12d" path="/var/lib/kubelet/pods/b9e8f8ed-d6d8-49c9-855d-da71de9bd12d/volumes" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.159847 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-66qkn"] Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.167096 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.181102 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-66qkn"] Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.181527 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.181744 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.237504 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.237834 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.239459 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcqpk\" (UniqueName: \"kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.239685 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.341988 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.342054 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.342094 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcqpk\" (UniqueName: \"kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.342158 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.350971 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.351601 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.352130 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.359166 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcqpk\" (UniqueName: \"kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk\") pod \"nova-cell1-cell-mapping-66qkn\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.482099 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.482122 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.534389 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.895858 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerStarted","Data":"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab"} Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.896188 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerStarted","Data":"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc"} Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.898856 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3867ea54-d402-4641-936e-9038ce646012","Type":"ContainerStarted","Data":"1d11126bee6a0cab371e45d0f3521f50833179b746526272d8144ad7753cb365"} Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.898880 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3867ea54-d402-4641-936e-9038ce646012","Type":"ContainerStarted","Data":"0d70d0c17413e62105069773b76b76cca2642eaaa8fd8e1010c62576d37ac622"} Dec 03 07:48:21 crc kubenswrapper[4612]: I1203 07:48:21.918977 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.918959901 podStartE2EDuration="2.918959901s" podCreationTimestamp="2025-12-03 07:48:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:21.913341962 +0000 UTC m=+1265.086699372" watchObservedRunningTime="2025-12-03 07:48:21.918959901 +0000 UTC m=+1265.092317301" Dec 03 07:48:22 crc kubenswrapper[4612]: I1203 07:48:22.013506 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-66qkn"] Dec 03 07:48:22 crc kubenswrapper[4612]: W1203 07:48:22.017358 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94028f3c_def3_439a_9a40_07c39f058702.slice/crio-a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6 WatchSource:0}: Error finding container a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6: Status 404 returned error can't find the container with id a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6 Dec 03 07:48:22 crc kubenswrapper[4612]: I1203 07:48:22.911135 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3867ea54-d402-4641-936e-9038ce646012","Type":"ContainerStarted","Data":"c1ff96c903509714e02d6d71587dc6f9cdf19ecbe324d279ed5f066567baef97"} Dec 03 07:48:22 crc kubenswrapper[4612]: I1203 07:48:22.912666 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-66qkn" event={"ID":"94028f3c-def3-439a-9a40-07c39f058702","Type":"ContainerStarted","Data":"c8bdac0076f6d078133ee2b1841130120fbf19dd228a40d36f691c6c6d19d551"} Dec 03 07:48:22 crc kubenswrapper[4612]: I1203 07:48:22.912693 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-66qkn" event={"ID":"94028f3c-def3-439a-9a40-07c39f058702","Type":"ContainerStarted","Data":"a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6"} Dec 03 07:48:22 crc kubenswrapper[4612]: I1203 07:48:22.937597 4612 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-66qkn" podStartSLOduration=1.9375778719999999 podStartE2EDuration="1.937577872s" podCreationTimestamp="2025-12-03 07:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:22.929745249 +0000 UTC m=+1266.103102659" watchObservedRunningTime="2025-12-03 07:48:22.937577872 +0000 UTC m=+1266.110935282" Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.458862 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.526137 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.526360 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="dnsmasq-dns" containerID="cri-o://531b7597e566158de1081d8456b89e683c201062f7ae557ceb4b334405fd56a8" gracePeriod=10 Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.936414 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3867ea54-d402-4641-936e-9038ce646012","Type":"ContainerStarted","Data":"62061188ac2d8b9604176aaafab304a0d6e0e33f4d8658b03ad7f653031af767"} Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.951615 4612 generic.go:334] "Generic (PLEG): container finished" podID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerID="531b7597e566158de1081d8456b89e683c201062f7ae557ceb4b334405fd56a8" exitCode=0 Dec 03 07:48:23 crc kubenswrapper[4612]: I1203 07:48:23.951872 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" event={"ID":"bdbc5b1d-28f6-46f0-8cca-4a163110d00e","Type":"ContainerDied","Data":"531b7597e566158de1081d8456b89e683c201062f7ae557ceb4b334405fd56a8"} Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.038522 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.092978 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.093104 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.093134 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kwwh\" (UniqueName: \"kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.093160 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.093255 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.093284 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb\") pod \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\" (UID: \"bdbc5b1d-28f6-46f0-8cca-4a163110d00e\") " Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.174900 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh" (OuterVolumeSpecName: "kube-api-access-8kwwh") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "kube-api-access-8kwwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.199093 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kwwh\" (UniqueName: \"kubernetes.io/projected/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-kube-api-access-8kwwh\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.223543 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.256415 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.258428 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.268525 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config" (OuterVolumeSpecName: "config") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.288364 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bdbc5b1d-28f6-46f0-8cca-4a163110d00e" (UID: "bdbc5b1d-28f6-46f0-8cca-4a163110d00e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.301044 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.301074 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.301084 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.301095 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.301104 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bdbc5b1d-28f6-46f0-8cca-4a163110d00e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.962070 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" event={"ID":"bdbc5b1d-28f6-46f0-8cca-4a163110d00e","Type":"ContainerDied","Data":"1e121674b70d33f5f67481fb9d4ecbdb7a7ebfbfea978b61132dcb2ebf1d8a1e"} Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.962387 4612 scope.go:117] "RemoveContainer" 
containerID="531b7597e566158de1081d8456b89e683c201062f7ae557ceb4b334405fd56a8" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.962102 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-rzcgj" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.969266 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3867ea54-d402-4641-936e-9038ce646012","Type":"ContainerStarted","Data":"b20c33c9fa80f612286345f04b75f698d5ca1c247603b5da869cae4476c8315f"} Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.969672 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 07:48:24 crc kubenswrapper[4612]: I1203 07:48:24.991005 4612 scope.go:117] "RemoveContainer" containerID="d6be38c30d036590da17c668bd473ba55009655eb1ee2c5f66fab8d9cc346bc7" Dec 03 07:48:25 crc kubenswrapper[4612]: I1203 07:48:25.037404 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.847052983 podStartE2EDuration="6.037383691s" podCreationTimestamp="2025-12-03 07:48:19 +0000 UTC" firstStartedPulling="2025-12-03 07:48:20.955484674 +0000 UTC m=+1264.128842074" lastFinishedPulling="2025-12-03 07:48:24.145815342 +0000 UTC m=+1267.319172782" observedRunningTime="2025-12-03 07:48:24.996835418 +0000 UTC m=+1268.170192848" watchObservedRunningTime="2025-12-03 07:48:25.037383691 +0000 UTC m=+1268.210741101" Dec 03 07:48:25 crc kubenswrapper[4612]: I1203 07:48:25.040471 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:48:25 crc kubenswrapper[4612]: I1203 07:48:25.050630 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-rzcgj"] Dec 03 07:48:25 crc kubenswrapper[4612]: I1203 07:48:25.099139 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" path="/var/lib/kubelet/pods/bdbc5b1d-28f6-46f0-8cca-4a163110d00e/volumes" Dec 03 07:48:29 crc kubenswrapper[4612]: I1203 07:48:29.009899 4612 generic.go:334] "Generic (PLEG): container finished" podID="94028f3c-def3-439a-9a40-07c39f058702" containerID="c8bdac0076f6d078133ee2b1841130120fbf19dd228a40d36f691c6c6d19d551" exitCode=0 Dec 03 07:48:29 crc kubenswrapper[4612]: I1203 07:48:29.010203 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-66qkn" event={"ID":"94028f3c-def3-439a-9a40-07c39f058702","Type":"ContainerDied","Data":"c8bdac0076f6d078133ee2b1841130120fbf19dd228a40d36f691c6c6d19d551"} Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.323913 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.324172 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.447487 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.478092 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.479266 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.493756 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.529916 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts\") pod \"94028f3c-def3-439a-9a40-07c39f058702\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.529995 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcqpk\" (UniqueName: \"kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk\") pod \"94028f3c-def3-439a-9a40-07c39f058702\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.530025 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle\") pod \"94028f3c-def3-439a-9a40-07c39f058702\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.530135 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data\") pod \"94028f3c-def3-439a-9a40-07c39f058702\" (UID: \"94028f3c-def3-439a-9a40-07c39f058702\") " Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.537786 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk" (OuterVolumeSpecName: "kube-api-access-kcqpk") pod "94028f3c-def3-439a-9a40-07c39f058702" (UID: "94028f3c-def3-439a-9a40-07c39f058702"). InnerVolumeSpecName "kube-api-access-kcqpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.563710 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts" (OuterVolumeSpecName: "scripts") pod "94028f3c-def3-439a-9a40-07c39f058702" (UID: "94028f3c-def3-439a-9a40-07c39f058702"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.572975 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94028f3c-def3-439a-9a40-07c39f058702" (UID: "94028f3c-def3-439a-9a40-07c39f058702"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.576554 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data" (OuterVolumeSpecName: "config-data") pod "94028f3c-def3-439a-9a40-07c39f058702" (UID: "94028f3c-def3-439a-9a40-07c39f058702"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.633014 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.633048 4612 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.633057 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcqpk\" (UniqueName: \"kubernetes.io/projected/94028f3c-def3-439a-9a40-07c39f058702-kube-api-access-kcqpk\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:30 crc kubenswrapper[4612]: I1203 07:48:30.633069 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94028f3c-def3-439a-9a40-07c39f058702-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.042093 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-66qkn" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.042160 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-66qkn" event={"ID":"94028f3c-def3-439a-9a40-07c39f058702","Type":"ContainerDied","Data":"a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6"} Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.042194 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1130483242c58a602a9eafd9b68942496f038d8af045333e6d6ce338b352dd6" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.053225 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.259472 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.260047 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-api" containerID="cri-o://afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab" gracePeriod=30 Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.260433 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-log" containerID="cri-o://89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc" gracePeriod=30 Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.264670 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": 
EOF" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.276237 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": EOF" Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.292310 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.292511 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerName="nova-scheduler-scheduler" containerID="cri-o://7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" gracePeriod=30 Dec 03 07:48:31 crc kubenswrapper[4612]: I1203 07:48:31.337609 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:32 crc kubenswrapper[4612]: I1203 07:48:32.054069 4612 generic.go:334] "Generic (PLEG): container finished" podID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerID="89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc" exitCode=143 Dec 03 07:48:32 crc kubenswrapper[4612]: I1203 07:48:32.055340 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerDied","Data":"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc"} Dec 03 07:48:32 crc kubenswrapper[4612]: E1203 07:48:32.386646 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:48:32 crc kubenswrapper[4612]: E1203 07:48:32.387924 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:48:32 crc kubenswrapper[4612]: E1203 07:48:32.388933 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 07:48:32 crc kubenswrapper[4612]: E1203 07:48:32.388986 4612 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerName="nova-scheduler-scheduler" Dec 03 07:48:33 crc kubenswrapper[4612]: I1203 07:48:33.068921 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" containerID="cri-o://eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8" gracePeriod=30 Dec 03 07:48:33 crc kubenswrapper[4612]: I1203 07:48:33.069009 4612 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" containerID="cri-o://5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c" gracePeriod=30 Dec 03 07:48:34 crc kubenswrapper[4612]: I1203 07:48:34.080204 4612 generic.go:334] "Generic (PLEG): container finished" podID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerID="eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8" exitCode=143 Dec 03 07:48:34 crc kubenswrapper[4612]: I1203 07:48:34.080278 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerDied","Data":"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8"} Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.238096 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:52276->10.217.0.195:8775: read: connection reset by peer" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.238260 4612 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:52288->10.217.0.195:8775: read: connection reset by peer" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.895108 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.918081 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940198 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle\") pod \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940253 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle\") pod \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940383 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs\") pod \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940417 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llldl\" (UniqueName: \"kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl\") pod \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940464 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs\") pod \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940505 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data\") pod \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940542 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data\") pod \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\" (UID: \"64a9499f-d0a5-4581-85df-ff00cf6f4dfd\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.940593 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgdj6\" (UniqueName: \"kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6\") pod \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\" (UID: \"32d00d3f-dcaf-4f86-b058-38666d24c6ce\") " Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.943711 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs" (OuterVolumeSpecName: "logs") pod "32d00d3f-dcaf-4f86-b058-38666d24c6ce" (UID: "32d00d3f-dcaf-4f86-b058-38666d24c6ce"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.953559 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl" (OuterVolumeSpecName: "kube-api-access-llldl") pod "64a9499f-d0a5-4581-85df-ff00cf6f4dfd" (UID: "64a9499f-d0a5-4581-85df-ff00cf6f4dfd"). InnerVolumeSpecName "kube-api-access-llldl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:36 crc kubenswrapper[4612]: I1203 07:48:36.954015 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6" (OuterVolumeSpecName: "kube-api-access-bgdj6") pod "32d00d3f-dcaf-4f86-b058-38666d24c6ce" (UID: "32d00d3f-dcaf-4f86-b058-38666d24c6ce"). InnerVolumeSpecName "kube-api-access-bgdj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.008670 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32d00d3f-dcaf-4f86-b058-38666d24c6ce" (UID: "32d00d3f-dcaf-4f86-b058-38666d24c6ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.015088 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64a9499f-d0a5-4581-85df-ff00cf6f4dfd" (UID: "64a9499f-d0a5-4581-85df-ff00cf6f4dfd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.024549 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data" (OuterVolumeSpecName: "config-data") pod "32d00d3f-dcaf-4f86-b058-38666d24c6ce" (UID: "32d00d3f-dcaf-4f86-b058-38666d24c6ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.025181 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data" (OuterVolumeSpecName: "config-data") pod "64a9499f-d0a5-4581-85df-ff00cf6f4dfd" (UID: "64a9499f-d0a5-4581-85df-ff00cf6f4dfd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.043929 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044142 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d00d3f-dcaf-4f86-b058-38666d24c6ce-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044152 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llldl\" (UniqueName: \"kubernetes.io/projected/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-kube-api-access-llldl\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044164 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044172 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044180 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgdj6\" (UniqueName: \"kubernetes.io/projected/32d00d3f-dcaf-4f86-b058-38666d24c6ce-kube-api-access-bgdj6\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.044188 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64a9499f-d0a5-4581-85df-ff00cf6f4dfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.049753 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "32d00d3f-dcaf-4f86-b058-38666d24c6ce" (UID: "32d00d3f-dcaf-4f86-b058-38666d24c6ce"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.057620 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.114555 4612 generic.go:334] "Generic (PLEG): container finished" podID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerID="5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c" exitCode=0 Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.114697 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerDied","Data":"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.114730 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"32d00d3f-dcaf-4f86-b058-38666d24c6ce","Type":"ContainerDied","Data":"0ee97b3664277a0dc7502eabcdef8dabea0f3dca346830d453f572af65b89fc7"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.114751 4612 scope.go:117] "RemoveContainer" containerID="5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.114914 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.117037 4612 generic.go:334] "Generic (PLEG): container finished" podID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" exitCode=0 Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.117105 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.117106 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"64a9499f-d0a5-4581-85df-ff00cf6f4dfd","Type":"ContainerDied","Data":"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.117217 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"64a9499f-d0a5-4581-85df-ff00cf6f4dfd","Type":"ContainerDied","Data":"48a3465e1b4c09d72e1cb7dd6f0e80eeadca056d7395660d348329f4e4ea25a3"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.127049 4612 generic.go:334] "Generic (PLEG): container finished" podID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerID="afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab" exitCode=0 Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.127095 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerDied","Data":"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.127122 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd","Type":"ContainerDied","Data":"c03fe91d6e79578e4e699d73aec0d1dbc3dbc95d29042a9a033c3714ce89b65e"} Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.127181 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.146316 4612 scope.go:117] "RemoveContainer" containerID="eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.148269 4612 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d00d3f-dcaf-4f86-b058-38666d24c6ce-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.156275 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.169792 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.204247 4612 scope.go:117] "RemoveContainer" containerID="5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.204952 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c\": container with ID starting with 5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c not found: ID does not exist" containerID="5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.204983 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c"} err="failed to get container status \"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c\": rpc error: code = NotFound desc = could not find container \"5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c\": container with ID starting with 5478b30b8b590343b8e2081970ce529c9e82e19e593bb4eb649758f572bc899c not found: ID does not exist" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.205008 4612 scope.go:117] "RemoveContainer" containerID="eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.205314 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8\": container with ID starting with eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8 not found: ID does not exist" containerID="eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.205345 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8"} err="failed to get container status \"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8\": rpc error: code = NotFound desc = could not find container \"eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8\": container with ID starting with eba7b14d413b6ccd0a8c01776af16f34f4bff5999fc3c1c561e566a6b977edd8 not found: ID does not exist" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.205360 4612 scope.go:117] "RemoveContainer" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252057 4612 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252112 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65hvl\" (UniqueName: \"kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252200 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252221 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252257 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.252292 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs\") pod \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\" (UID: \"12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd\") " Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.253816 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs" (OuterVolumeSpecName: "logs") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.255587 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.257212 4612 scope.go:117] "RemoveContainer" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.257367 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258162 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258221 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258232 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258226 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl" (OuterVolumeSpecName: "kube-api-access-65hvl") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). InnerVolumeSpecName "kube-api-access-65hvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258269 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="init" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258279 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="init" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258304 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-log" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258311 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-log" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258332 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94028f3c-def3-439a-9a40-07c39f058702" containerName="nova-manage" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258339 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="94028f3c-def3-439a-9a40-07c39f058702" containerName="nova-manage" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258367 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-api" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258375 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-api" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258394 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerName="nova-scheduler-scheduler" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258404 4612 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerName="nova-scheduler-scheduler" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.258419 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="dnsmasq-dns" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258448 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="dnsmasq-dns" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258687 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" containerName="nova-scheduler-scheduler" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258703 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-api" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258716 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdbc5b1d-28f6-46f0-8cca-4a163110d00e" containerName="dnsmasq-dns" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258727 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="94028f3c-def3-439a-9a40-07c39f058702" containerName="nova-manage" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258737 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-log" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258752 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" containerName="nova-metadata-metadata" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.258774 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" containerName="nova-api-log" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.260144 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.262383 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81\": container with ID starting with 7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81 not found: ID does not exist" containerID="7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.262482 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81"} err="failed to get container status \"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81\": rpc error: code = NotFound desc = could not find container \"7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81\": container with ID starting with 7b79d118f6f781e4d2a6328085855cd28b3f87663172ed5fdf0c4af2226cab81 not found: ID does not exist" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.262511 4612 scope.go:117] "RemoveContainer" containerID="afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.263333 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.264555 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.269755 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.286210 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.298192 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.303993 4612 scope.go:117] "RemoveContainer" containerID="89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.312805 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.314311 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.314420 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data" (OuterVolumeSpecName: "config-data") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.317622 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.318500 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.318753 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.324760 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354484 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28910ce9-c2c5-484e-bacd-9170253c2e83-logs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354564 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354634 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" (UID: "12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354669 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354710 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-config-data\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354785 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hclqf\" (UniqueName: \"kubernetes.io/projected/28910ce9-c2c5-484e-bacd-9170253c2e83-kube-api-access-hclqf\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354807 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9hgh\" (UniqueName: \"kubernetes.io/projected/cf66f76e-bca2-466f-b672-bfe680810c3e-kube-api-access-g9hgh\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354855 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-config-data\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354899 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.354996 4612 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.355007 4612 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.355016 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65hvl\" (UniqueName: \"kubernetes.io/projected/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-kube-api-access-65hvl\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.355027 4612 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-logs\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.355039 4612 reconciler_common.go:293] "Volume 
detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.355049 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.444660 4612 scope.go:117] "RemoveContainer" containerID="afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.445105 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab\": container with ID starting with afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab not found: ID does not exist" containerID="afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.445157 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab"} err="failed to get container status \"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab\": rpc error: code = NotFound desc = could not find container \"afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab\": container with ID starting with afb51e1ee49ba3d0356585b1329b7e7405a436ae122edc32173ee0c3cfb877ab not found: ID does not exist" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.445178 4612 scope.go:117] "RemoveContainer" containerID="89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc" Dec 03 07:48:37 crc kubenswrapper[4612]: E1203 07:48:37.445639 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc\": container with ID starting with 89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc not found: ID does not exist" containerID="89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.445684 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc"} err="failed to get container status \"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc\": rpc error: code = NotFound desc = could not find container \"89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc\": container with ID starting with 89f287b3fe40f4706db9fa720705f2fe1b7f156f55bcb28f931a0aba0ed56cdc not found: ID does not exist" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.460935 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28910ce9-c2c5-484e-bacd-9170253c2e83-logs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461089 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461132 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461171 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-config-data\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461239 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hclqf\" (UniqueName: \"kubernetes.io/projected/28910ce9-c2c5-484e-bacd-9170253c2e83-kube-api-access-hclqf\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461261 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9hgh\" (UniqueName: \"kubernetes.io/projected/cf66f76e-bca2-466f-b672-bfe680810c3e-kube-api-access-g9hgh\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.461507 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28910ce9-c2c5-484e-bacd-9170253c2e83-logs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.462632 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-config-data\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.463464 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.464928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.465471 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.466251 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cf66f76e-bca2-466f-b672-bfe680810c3e-config-data\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.474979 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-config-data\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.481171 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.486089 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hclqf\" (UniqueName: \"kubernetes.io/projected/28910ce9-c2c5-484e-bacd-9170253c2e83-kube-api-access-hclqf\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.490128 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/28910ce9-c2c5-484e-bacd-9170253c2e83-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"28910ce9-c2c5-484e-bacd-9170253c2e83\") " pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.490739 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9hgh\" (UniqueName: \"kubernetes.io/projected/cf66f76e-bca2-466f-b672-bfe680810c3e-kube-api-access-g9hgh\") pod \"nova-scheduler-0\" (UID: \"cf66f76e-bca2-466f-b672-bfe680810c3e\") " pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.492423 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.507897 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.509572 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.512865 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.513051 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.513146 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.532462 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565017 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfb6ea50-d549-413b-b2b0-70f5d0e38954-logs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565068 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwc8k\" (UniqueName: \"kubernetes.io/projected/bfb6ea50-d549-413b-b2b0-70f5d0e38954-kube-api-access-nwc8k\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565098 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565138 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-public-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565185 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.565280 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-config-data\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.598408 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.666888 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfb6ea50-d549-413b-b2b0-70f5d0e38954-logs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.666956 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwc8k\" (UniqueName: \"kubernetes.io/projected/bfb6ea50-d549-413b-b2b0-70f5d0e38954-kube-api-access-nwc8k\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.666986 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.667029 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-public-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.667111 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.667135 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-config-data\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.668067 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfb6ea50-d549-413b-b2b0-70f5d0e38954-logs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.670838 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-public-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.674729 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.681645 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 
07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.683420 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb6ea50-d549-413b-b2b0-70f5d0e38954-config-data\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.687915 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwc8k\" (UniqueName: \"kubernetes.io/projected/bfb6ea50-d549-413b-b2b0-70f5d0e38954-kube-api-access-nwc8k\") pod \"nova-api-0\" (UID: \"bfb6ea50-d549-413b-b2b0-70f5d0e38954\") " pod="openstack/nova-api-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.735082 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 07:48:37 crc kubenswrapper[4612]: I1203 07:48:37.871030 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 07:48:38 crc kubenswrapper[4612]: I1203 07:48:38.113452 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 07:48:38 crc kubenswrapper[4612]: W1203 07:48:38.119475 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28910ce9_c2c5_484e_bacd_9170253c2e83.slice/crio-4d9544643c3d445e951f72efc37bde766f0019e05c91ca8e8cf8b8ca5ff73cde WatchSource:0}: Error finding container 4d9544643c3d445e951f72efc37bde766f0019e05c91ca8e8cf8b8ca5ff73cde: Status 404 returned error can't find the container with id 4d9544643c3d445e951f72efc37bde766f0019e05c91ca8e8cf8b8ca5ff73cde Dec 03 07:48:38 crc kubenswrapper[4612]: I1203 07:48:38.150012 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"28910ce9-c2c5-484e-bacd-9170253c2e83","Type":"ContainerStarted","Data":"4d9544643c3d445e951f72efc37bde766f0019e05c91ca8e8cf8b8ca5ff73cde"} Dec 03 07:48:38 crc kubenswrapper[4612]: I1203 07:48:38.191451 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 07:48:38 crc kubenswrapper[4612]: I1203 07:48:38.346432 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.102752 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd" path="/var/lib/kubelet/pods/12ad6f8a-1d13-4e56-93c6-27fb7b0cdbbd/volumes" Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.103884 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32d00d3f-dcaf-4f86-b058-38666d24c6ce" path="/var/lib/kubelet/pods/32d00d3f-dcaf-4f86-b058-38666d24c6ce/volumes" Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.104572 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64a9499f-d0a5-4581-85df-ff00cf6f4dfd" path="/var/lib/kubelet/pods/64a9499f-d0a5-4581-85df-ff00cf6f4dfd/volumes" Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.159512 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"28910ce9-c2c5-484e-bacd-9170253c2e83","Type":"ContainerStarted","Data":"d44c9b7ae596738c9d21435a088bec4a2a50cfa39966c22042fb300a98fc696d"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.159555 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"28910ce9-c2c5-484e-bacd-9170253c2e83","Type":"ContainerStarted","Data":"2ca1828c8cc09a62b9dfaae4b2eeff294345720f5d50f2b6f60df1136732b9fc"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.160897 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf66f76e-bca2-466f-b672-bfe680810c3e","Type":"ContainerStarted","Data":"e5a51f7723671b0203024c3a0115e903174fce39651b1c8a3384d994f14a12af"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.160920 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf66f76e-bca2-466f-b672-bfe680810c3e","Type":"ContainerStarted","Data":"8d4966136709fc0237bf455d311d431ff7dbf29029d66c193697e67d738c22d3"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.164289 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bfb6ea50-d549-413b-b2b0-70f5d0e38954","Type":"ContainerStarted","Data":"5992154c358dcf2904836f5c8b18bdc8e0b90ecea0aee1744c2b7ac78568cf8d"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.164332 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bfb6ea50-d549-413b-b2b0-70f5d0e38954","Type":"ContainerStarted","Data":"7a1c3e91a91db9ac496d5a854ebb5738b87270976646c22f8a28e428b498da63"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.164344 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bfb6ea50-d549-413b-b2b0-70f5d0e38954","Type":"ContainerStarted","Data":"e1474416b93ed901f63bb6a22905893a5740319f6e76059228a87216413a5b0e"} Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.183510 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.183485681 podStartE2EDuration="2.183485681s" podCreationTimestamp="2025-12-03 07:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:39.174439617 +0000 UTC m=+1282.347797047" watchObservedRunningTime="2025-12-03 07:48:39.183485681 +0000 UTC m=+1282.356843111" Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.232352 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.232332549 podStartE2EDuration="2.232332549s" podCreationTimestamp="2025-12-03 07:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:39.196566015 +0000 UTC m=+1282.369923415" watchObservedRunningTime="2025-12-03 07:48:39.232332549 +0000 UTC m=+1282.405689949" Dec 03 07:48:39 crc kubenswrapper[4612]: I1203 07:48:39.250719 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.250699453 podStartE2EDuration="2.250699453s" podCreationTimestamp="2025-12-03 07:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:48:39.226041753 +0000 UTC m=+1282.399399153" watchObservedRunningTime="2025-12-03 07:48:39.250699453 +0000 UTC m=+1282.424056853" Dec 03 07:48:42 crc kubenswrapper[4612]: I1203 07:48:42.599859 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 07:48:42 crc kubenswrapper[4612]: I1203 07:48:42.600225 4612 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 07:48:42 crc kubenswrapper[4612]: I1203 07:48:42.736770 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.136232 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.136802 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.599854 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.599905 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.736461 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.793449 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.871461 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:47 crc kubenswrapper[4612]: I1203 07:48:47.871502 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 07:48:48 crc kubenswrapper[4612]: I1203 07:48:48.284967 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 07:48:48 crc kubenswrapper[4612]: I1203 07:48:48.611133 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="28910ce9-c2c5-484e-bacd-9170253c2e83" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:48 crc kubenswrapper[4612]: I1203 07:48:48.611177 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="28910ce9-c2c5-484e-bacd-9170253c2e83" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:48 crc kubenswrapper[4612]: I1203 07:48:48.883111 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bfb6ea50-d549-413b-b2b0-70f5d0e38954" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:48 crc kubenswrapper[4612]: I1203 07:48:48.883157 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bfb6ea50-d549-413b-b2b0-70f5d0e38954" containerName="nova-api-log" probeResult="failure" 
output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 07:48:50 crc kubenswrapper[4612]: I1203 07:48:50.380537 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.612829 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.619467 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.623210 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.881283 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.881906 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.884367 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 07:48:57 crc kubenswrapper[4612]: I1203 07:48:57.894713 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 07:48:58 crc kubenswrapper[4612]: I1203 07:48:58.358180 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 07:48:58 crc kubenswrapper[4612]: I1203 07:48:58.364098 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 07:48:58 crc kubenswrapper[4612]: I1203 07:48:58.370884 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 07:49:06 crc kubenswrapper[4612]: I1203 07:49:06.986450 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:07 crc kubenswrapper[4612]: I1203 07:49:07.879829 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:49:11 crc kubenswrapper[4612]: I1203 07:49:11.454410 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="rabbitmq" containerID="cri-o://124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c" gracePeriod=604796 Dec 03 07:49:11 crc kubenswrapper[4612]: I1203 07:49:11.935006 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="rabbitmq" containerID="cri-o://507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2" gracePeriod=604796 Dec 03 07:49:17 crc kubenswrapper[4612]: I1203 07:49:17.136396 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:49:17 crc kubenswrapper[4612]: I1203 07:49:17.137103 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:49:17 crc kubenswrapper[4612]: I1203 07:49:17.997489 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.096833 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097240 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097295 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097345 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24t4q\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097368 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097415 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097441 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097485 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097533 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") " Dec 03 07:49:18 crc 
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.097605 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf\") pod \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\" (UID: \"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.105197 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.106362 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.109146 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.126972 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info" (OuterVolumeSpecName: "pod-info") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.127051 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.127110 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.127294 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.137827 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q" (OuterVolumeSpecName: "kube-api-access-24t4q") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "kube-api-access-24t4q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.176033 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data" (OuterVolumeSpecName: "config-data") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199744 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199774 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199784 4612 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-pod-info\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199792 4612 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199800 4612 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199810 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199836 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199845 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24t4q\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-kube-api-access-24t4q\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.199856 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.211814 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-server-conf" (OuterVolumeSpecName: "server-conf") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.243510 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.266343 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" (UID: "5c8ebcbd-12df-4bd6-8800-c43b5656c5d1"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.301541 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.301573 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.301584 4612 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1-server-conf\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.438153 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508693 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508746 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508774 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508800 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jjf5\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508871 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508914 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508937 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508969 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.508990 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.509095 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") "
\"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.509119 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls\") pod \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\" (UID: \"52c23f49-b562-4a42-a8bc-b2214d1f8afe\") " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.509958 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.510295 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.517345 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.518802 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5" (OuterVolumeSpecName: "kube-api-access-2jjf5") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "kube-api-access-2jjf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.521300 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.527216 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info" (OuterVolumeSpecName: "pod-info") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.529184 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.532417 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.554325 4612 generic.go:334] "Generic (PLEG): container finished" podID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerID="507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2" exitCode=0 Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.554434 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.554498 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerDied","Data":"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2"} Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.556294 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"52c23f49-b562-4a42-a8bc-b2214d1f8afe","Type":"ContainerDied","Data":"a12c32f60230de1b9bf6c5e49f46702ab08910384e9876c5b1ea24bccac59855"} Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.556328 4612 scope.go:117] "RemoveContainer" containerID="507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.577456 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data" (OuterVolumeSpecName: "config-data") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.578396 4612 generic.go:334] "Generic (PLEG): container finished" podID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerID="124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c" exitCode=0 Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.578683 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerDied","Data":"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c"} Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.578715 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5c8ebcbd-12df-4bd6-8800-c43b5656c5d1","Type":"ContainerDied","Data":"071bf769ad8738bd74966b24b554a38ca7620980de1bd428da0783871c26086d"} Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.578717 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.604781 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf" (OuterVolumeSpecName: "server-conf") pod "52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). 
InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611350 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611376 4612 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611386 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611395 4612 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/52c23f49-b562-4a42-a8bc-b2214d1f8afe-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611415 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611423 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611433 4612 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/52c23f49-b562-4a42-a8bc-b2214d1f8afe-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611441 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611449 4612 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/52c23f49-b562-4a42-a8bc-b2214d1f8afe-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.611458 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jjf5\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-kube-api-access-2jjf5\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.618497 4612 scope.go:117] "RemoveContainer" containerID="8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.619211 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.629349 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.647755 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod 
"52c23f49-b562-4a42-a8bc-b2214d1f8afe" (UID: "52c23f49-b562-4a42-a8bc-b2214d1f8afe"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.651637 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.666785 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.667185 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="setup-container" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667197 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="setup-container" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.667240 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="setup-container" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667247 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="setup-container" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.667258 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667264 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.667276 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667283 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667452 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667463 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" containerName="rabbitmq" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.667874 4612 scope.go:117] "RemoveContainer" containerID="507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.668368 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.675545 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.677285 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.677577 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.677730 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.677869 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.679862 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.684205 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4q5j6" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.688433 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2\": container with ID starting with 507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2 not found: ID does not exist" containerID="507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.688470 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2"} err="failed to get container status \"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2\": rpc error: code = NotFound desc = could not find container \"507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2\": container with ID starting with 507cb878f417831a7ff1d54d5c7504e5c85241562a5aca7a3fc5204539d7f8a2 not found: ID does not exist" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.688494 4612 scope.go:117] "RemoveContainer" containerID="8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.693693 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.694453 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f\": container with ID starting with 8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f not found: ID does not exist" containerID="8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.694493 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f"} err="failed to get container status \"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f\": rpc error: code = NotFound desc = could not find container 
\"8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f\": container with ID starting with 8a5e9dbc4ad2bf4a254c8ed03a3f0ab6ef31feaf8816c7ca29b6f84aeaa17c8f not found: ID does not exist" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.694524 4612 scope.go:117] "RemoveContainer" containerID="124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714346 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714386 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714438 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08764b2d-6ed4-4495-8338-03d2af8dcbdd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714501 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714549 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714568 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08764b2d-6ed4-4495-8338-03d2af8dcbdd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714581 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714594 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfrhc\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-kube-api-access-nfrhc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714615 4612 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714810 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-config-data\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714835 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714903 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.714915 4612 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/52c23f49-b562-4a42-a8bc-b2214d1f8afe-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.767767 4612 scope.go:117] "RemoveContainer" containerID="cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.801412 4612 scope.go:117] "RemoveContainer" containerID="124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.803637 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c\": container with ID starting with 124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c not found: ID does not exist" containerID="124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.803685 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c"} err="failed to get container status \"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c\": rpc error: code = NotFound desc = could not find container \"124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c\": container with ID starting with 124a81902b9c84c94bd0c9f27c1be3a4c2204f1cde756f498162032fe979198c not found: ID does not exist" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.803715 4612 scope.go:117] "RemoveContainer" containerID="cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74" Dec 03 07:49:18 crc kubenswrapper[4612]: E1203 07:49:18.804060 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74\": container with ID starting with cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74 not found: 
ID does not exist" containerID="cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.804088 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74"} err="failed to get container status \"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74\": rpc error: code = NotFound desc = could not find container \"cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74\": container with ID starting with cd57db7eedc85dbf36d0fa09300b355454260f405835018b1b803eaa868cfe74 not found: ID does not exist" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816216 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-config-data\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816261 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816314 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816348 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816389 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08764b2d-6ed4-4495-8338-03d2af8dcbdd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816462 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816508 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816529 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816544 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08764b2d-6ed4-4495-8338-03d2af8dcbdd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816559 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfrhc\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-kube-api-access-nfrhc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.816582 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.817072 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.817096 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.817354 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.820672 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08764b2d-6ed4-4495-8338-03d2af8dcbdd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.820786 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.820910 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.821207 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.821480 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.822018 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08764b2d-6ed4-4495-8338-03d2af8dcbdd-config-data\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.825403 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08764b2d-6ed4-4495-8338-03d2af8dcbdd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.836817 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfrhc\" (UniqueName: \"kubernetes.io/projected/08764b2d-6ed4-4495-8338-03d2af8dcbdd-kube-api-access-nfrhc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.869199 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"08764b2d-6ed4-4495-8338-03d2af8dcbdd\") " pod="openstack/rabbitmq-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.921533 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.930009 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.952668 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.954237 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.955819 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.956611 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.956683 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.956753 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.960703 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.961262 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.962065 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wglvs" Dec 03 07:49:18 crc kubenswrapper[4612]: I1203 07:49:18.969335 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.019830 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.019928 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.019973 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ed2435b-f44d-4468-baec-035755359147-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.019991 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020024 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020115 4612 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29xq4\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-kube-api-access-29xq4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020159 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ed2435b-f44d-4468-baec-035755359147-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020209 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020238 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020263 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.020282 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.067560 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.109853 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52c23f49-b562-4a42-a8bc-b2214d1f8afe" path="/var/lib/kubelet/pods/52c23f49-b562-4a42-a8bc-b2214d1f8afe/volumes" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.114935 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c8ebcbd-12df-4bd6-8800-c43b5656c5d1" path="/var/lib/kubelet/pods/5c8ebcbd-12df-4bd6-8800-c43b5656c5d1/volumes" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122142 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29xq4\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-kube-api-access-29xq4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122182 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ed2435b-f44d-4468-baec-035755359147-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122219 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122246 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122262 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122280 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122308 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122352 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122380 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ed2435b-f44d-4468-baec-035755359147-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122395 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.122435 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.123841 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.124564 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.124932 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.125902 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.127200 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ed2435b-f44d-4468-baec-035755359147-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.130095 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.131189 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ed2435b-f44d-4468-baec-035755359147-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.130927 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.131544 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.141046 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ed2435b-f44d-4468-baec-035755359147-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.156766 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29xq4\" (UniqueName: \"kubernetes.io/projected/6ed2435b-f44d-4468-baec-035755359147-kube-api-access-29xq4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.196355 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6ed2435b-f44d-4468-baec-035755359147\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.277561 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.374322 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"] Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.376281 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.386850 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.409485 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"] Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428119 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8wn\" (UniqueName: \"kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428438 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428531 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428609 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428648 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.428685 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.507809 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 07:49:19 crc kubenswrapper[4612]: W1203 07:49:19.518875 4612 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08764b2d_6ed4_4495_8338_03d2af8dcbdd.slice/crio-09dab986078299478f3226bc172b358ab8fb81a08ba0caa75a34a91e2474834e WatchSource:0}: Error finding container 09dab986078299478f3226bc172b358ab8fb81a08ba0caa75a34a91e2474834e: Status 404 returned error can't find the container with id 09dab986078299478f3226bc172b358ab8fb81a08ba0caa75a34a91e2474834e Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529721 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8wn\" (UniqueName: \"kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529758 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529840 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529866 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529911 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.529964 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.530017 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.531054 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.532464 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.536350 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.537081 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.537865 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.555882 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8wn\" (UniqueName: \"kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn\") pod \"dnsmasq-dns-5576978c7c-4fmlk\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.591376 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"08764b2d-6ed4-4495-8338-03d2af8dcbdd","Type":"ContainerStarted","Data":"09dab986078299478f3226bc172b358ab8fb81a08ba0caa75a34a91e2474834e"}
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.727488 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:19 crc kubenswrapper[4612]: I1203 07:49:19.840188 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 03 07:49:20 crc kubenswrapper[4612]: W1203 07:49:20.185087 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d61d1d6_8fdd_4349_bae7_c7123aa58742.slice/crio-8f237297ba8cae65fa9cc81acafa1a663d08b49e738a250e656883f8153ca4d3 WatchSource:0}: Error finding container 8f237297ba8cae65fa9cc81acafa1a663d08b49e738a250e656883f8153ca4d3: Status 404 returned error can't find the container with id 8f237297ba8cae65fa9cc81acafa1a663d08b49e738a250e656883f8153ca4d3
Dec 03 07:49:20 crc kubenswrapper[4612]: I1203 07:49:20.187361 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"]
Dec 03 07:49:20 crc kubenswrapper[4612]: I1203 07:49:20.604838 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" event={"ID":"5d61d1d6-8fdd-4349-bae7-c7123aa58742","Type":"ContainerStarted","Data":"8f237297ba8cae65fa9cc81acafa1a663d08b49e738a250e656883f8153ca4d3"}
Dec 03 07:49:20 crc kubenswrapper[4612]: I1203 07:49:20.607166 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6ed2435b-f44d-4468-baec-035755359147","Type":"ContainerStarted","Data":"55d64314b1eae3290ca1a29fd2a64426955cf90eb96d7627cbd5faa657970eb4"}
Dec 03 07:49:21 crc kubenswrapper[4612]: I1203 07:49:21.618278 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"08764b2d-6ed4-4495-8338-03d2af8dcbdd","Type":"ContainerStarted","Data":"2b460d9f6a91bc0d1427a965eed82b2f04eff122a8dd35d3c4683a8180e78519"}
Dec 03 07:49:21 crc kubenswrapper[4612]: I1203 07:49:21.621404 4612 generic.go:334] "Generic (PLEG): container finished" podID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerID="bd58493ba48b89b8d16c9e8faa48472bc0402ce066579d8225b5684e09e823f3" exitCode=0
Dec 03 07:49:21 crc kubenswrapper[4612]: I1203 07:49:21.622006 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" event={"ID":"5d61d1d6-8fdd-4349-bae7-c7123aa58742","Type":"ContainerDied","Data":"bd58493ba48b89b8d16c9e8faa48472bc0402ce066579d8225b5684e09e823f3"}
Dec 03 07:49:21 crc kubenswrapper[4612]: I1203 07:49:21.624460 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6ed2435b-f44d-4468-baec-035755359147","Type":"ContainerStarted","Data":"009b4f8ee21fcfad93e42ed9dcf8d40b28908e88bb6ac02721624f56f6b8abe6"}
Dec 03 07:49:22 crc kubenswrapper[4612]: I1203 07:49:22.640557 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" event={"ID":"5d61d1d6-8fdd-4349-bae7-c7123aa58742","Type":"ContainerStarted","Data":"4be405bc0f9e3d2374bf28a22bff8c5609f5515cd4aa7ece85ca9480dc714f49"}
Dec 03 07:49:22 crc kubenswrapper[4612]: I1203 07:49:22.682874 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" podStartSLOduration=3.682849344 podStartE2EDuration="3.682849344s" podCreationTimestamp="2025-12-03 07:49:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:49:22.678098046 +0000 UTC m=+1325.851455466" watchObservedRunningTime="2025-12-03 07:49:22.682849344 +0000 UTC m=+1325.856206764"
Dec 03 07:49:23 crc kubenswrapper[4612]: I1203 07:49:23.647923 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:29 crc kubenswrapper[4612]: I1203 07:49:29.730242 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk"
Dec 03 07:49:29 crc kubenswrapper[4612]: I1203 07:49:29.822114 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"]
Dec 03 07:49:29 crc kubenswrapper[4612]: I1203 07:49:29.822774 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="dnsmasq-dns" containerID="cri-o://1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c" gracePeriod=10
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.016421 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-kcpm2"]
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.018393 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.047139 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-kcpm2"]
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150350 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqrp6\" (UniqueName: \"kubernetes.io/projected/231aa396-d447-46be-b443-03de13ee8d90-kube-api-access-vqrp6\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150432 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150485 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-svc\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150507 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150541 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-config\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150570 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.150629 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.252952 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-svc\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.252999 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253029 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-config\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253080 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253126 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253199 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqrp6\" (UniqueName: \"kubernetes.io/projected/231aa396-d447-46be-b443-03de13ee8d90-kube-api-access-vqrp6\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253253 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.253985 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.254696 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-svc\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.255193 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.256021 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-config\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.257532 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.258249 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/231aa396-d447-46be-b443-03de13ee8d90-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.281910 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqrp6\" (UniqueName: \"kubernetes.io/projected/231aa396-d447-46be-b443-03de13ee8d90-kube-api-access-vqrp6\") pod \"dnsmasq-dns-667c9c995c-kcpm2\" (UID: \"231aa396-d447-46be-b443-03de13ee8d90\") " pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.333830 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.452876 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563618 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563685 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563734 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563830 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563870 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl5n9\" (UniqueName: \"kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.563916 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc\") pod \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\" (UID: \"6e2cf39c-6b71-49e5-b776-708ca1bf19f7\") "
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.581212 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9" (OuterVolumeSpecName: "kube-api-access-nl5n9") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "kube-api-access-nl5n9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.618224 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.627173 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.641842 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.644312 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.666766 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.666794 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.666825 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.666836 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.666845 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl5n9\" (UniqueName: \"kubernetes.io/projected/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-kube-api-access-nl5n9\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.680901 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config" (OuterVolumeSpecName: "config") pod "6e2cf39c-6b71-49e5-b776-708ca1bf19f7" (UID: "6e2cf39c-6b71-49e5-b776-708ca1bf19f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.736977 4612 generic.go:334] "Generic (PLEG): container finished" podID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerID="1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c" exitCode=0
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.737028 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" event={"ID":"6e2cf39c-6b71-49e5-b776-708ca1bf19f7","Type":"ContainerDied","Data":"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"}
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.737055 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b" event={"ID":"6e2cf39c-6b71-49e5-b776-708ca1bf19f7","Type":"ContainerDied","Data":"16d143c143850045c9d3a6361b4c569c74e5f7d0168570056f49dc519c7fa3e9"}
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.737087 4612 scope.go:117] "RemoveContainer" containerID="1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.737244 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-llz5b"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.769455 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"]
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.769714 4612 scope.go:117] "RemoveContainer" containerID="924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.770837 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e2cf39c-6b71-49e5-b776-708ca1bf19f7-config\") on node \"crc\" DevicePath \"\""
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.779425 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-llz5b"]
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.788466 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-kcpm2"]
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.794688 4612 scope.go:117] "RemoveContainer" containerID="1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"
Dec 03 07:49:30 crc kubenswrapper[4612]: E1203 07:49:30.795309 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c\": container with ID starting with 1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c not found: ID does not exist" containerID="1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.795349 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c"} err="failed to get container status \"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c\": rpc error: code = NotFound desc = could not find container \"1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c\": container with ID starting with 1c687ffc03f4ccbc71f0955fceafb323600f1cb8fe1e42dd315e686ea918420c not found: ID does not exist"
Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.795374 4612 scope.go:117] "RemoveContainer" containerID="924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea"
"RemoveContainer" containerID="924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea" Dec 03 07:49:30 crc kubenswrapper[4612]: E1203 07:49:30.796185 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea\": container with ID starting with 924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea not found: ID does not exist" containerID="924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea" Dec 03 07:49:30 crc kubenswrapper[4612]: I1203 07:49:30.796228 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea"} err="failed to get container status \"924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea\": rpc error: code = NotFound desc = could not find container \"924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea\": container with ID starting with 924b07482ca700f86a5b48c3cf8867796cfe6dd37dddc8d39ebd66cd4e7efcea not found: ID does not exist" Dec 03 07:49:31 crc kubenswrapper[4612]: I1203 07:49:31.100496 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" path="/var/lib/kubelet/pods/6e2cf39c-6b71-49e5-b776-708ca1bf19f7/volumes" Dec 03 07:49:31 crc kubenswrapper[4612]: I1203 07:49:31.759899 4612 generic.go:334] "Generic (PLEG): container finished" podID="231aa396-d447-46be-b443-03de13ee8d90" containerID="3130edc95a6c5a6b2212ab7872b2beb07c07b6f813a89857b96925eb3994a699" exitCode=0 Dec 03 07:49:31 crc kubenswrapper[4612]: I1203 07:49:31.759959 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" event={"ID":"231aa396-d447-46be-b443-03de13ee8d90","Type":"ContainerDied","Data":"3130edc95a6c5a6b2212ab7872b2beb07c07b6f813a89857b96925eb3994a699"} Dec 03 07:49:31 crc kubenswrapper[4612]: I1203 07:49:31.759986 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" event={"ID":"231aa396-d447-46be-b443-03de13ee8d90","Type":"ContainerStarted","Data":"fe6d3cb530e73f6fe8d441fedf2191653f0ac4a687bf1e4ed72c04b2be0f97ef"} Dec 03 07:49:32 crc kubenswrapper[4612]: I1203 07:49:32.771728 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" event={"ID":"231aa396-d447-46be-b443-03de13ee8d90","Type":"ContainerStarted","Data":"b7a9711a3ddb87d7b5d0c6e707e4e62152a1ffba292983183a25e4461bbdc6d0"} Dec 03 07:49:32 crc kubenswrapper[4612]: I1203 07:49:32.772077 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" Dec 03 07:49:32 crc kubenswrapper[4612]: I1203 07:49:32.801321 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" podStartSLOduration=3.801300548 podStartE2EDuration="3.801300548s" podCreationTimestamp="2025-12-03 07:49:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:49:32.794727875 +0000 UTC m=+1335.968085305" watchObservedRunningTime="2025-12-03 07:49:32.801300548 +0000 UTC m=+1335.974657948" Dec 03 07:49:40 crc kubenswrapper[4612]: I1203 07:49:40.336269 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667c9c995c-kcpm2" Dec 03 07:49:40 crc 
kubenswrapper[4612]: I1203 07:49:40.480327 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"] Dec 03 07:49:40 crc kubenswrapper[4612]: I1203 07:49:40.480629 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="dnsmasq-dns" containerID="cri-o://4be405bc0f9e3d2374bf28a22bff8c5609f5515cd4aa7ece85ca9480dc714f49" gracePeriod=10 Dec 03 07:49:40 crc kubenswrapper[4612]: I1203 07:49:40.861011 4612 generic.go:334] "Generic (PLEG): container finished" podID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerID="4be405bc0f9e3d2374bf28a22bff8c5609f5515cd4aa7ece85ca9480dc714f49" exitCode=0 Dec 03 07:49:40 crc kubenswrapper[4612]: I1203 07:49:40.861106 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" event={"ID":"5d61d1d6-8fdd-4349-bae7-c7123aa58742","Type":"ContainerDied","Data":"4be405bc0f9e3d2374bf28a22bff8c5609f5515cd4aa7ece85ca9480dc714f49"} Dec 03 07:49:40 crc kubenswrapper[4612]: I1203 07:49:40.972885 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.048671 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.048776 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.049520 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf8wn\" (UniqueName: \"kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.049575 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.049597 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.049675 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.049742 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-nb\") pod \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\" (UID: \"5d61d1d6-8fdd-4349-bae7-c7123aa58742\") " Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.070225 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn" (OuterVolumeSpecName: "kube-api-access-zf8wn") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "kube-api-access-zf8wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.114400 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.117844 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.118068 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.128012 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.136167 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.150727 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config" (OuterVolumeSpecName: "config") pod "5d61d1d6-8fdd-4349-bae7-c7123aa58742" (UID: "5d61d1d6-8fdd-4349-bae7-c7123aa58742"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151916 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151935 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151965 4612 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151976 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf8wn\" (UniqueName: \"kubernetes.io/projected/5d61d1d6-8fdd-4349-bae7-c7123aa58742-kube-api-access-zf8wn\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151986 4612 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.151994 4612 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.152005 4612 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d61d1d6-8fdd-4349-bae7-c7123aa58742-config\") on node \"crc\" DevicePath \"\"" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.870657 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" event={"ID":"5d61d1d6-8fdd-4349-bae7-c7123aa58742","Type":"ContainerDied","Data":"8f237297ba8cae65fa9cc81acafa1a663d08b49e738a250e656883f8153ca4d3"} Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.870702 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-4fmlk" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.870919 4612 scope.go:117] "RemoveContainer" containerID="4be405bc0f9e3d2374bf28a22bff8c5609f5515cd4aa7ece85ca9480dc714f49" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.901814 4612 scope.go:117] "RemoveContainer" containerID="bd58493ba48b89b8d16c9e8faa48472bc0402ce066579d8225b5684e09e823f3" Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.903020 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"] Dec 03 07:49:41 crc kubenswrapper[4612]: I1203 07:49:41.912178 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-4fmlk"] Dec 03 07:49:43 crc kubenswrapper[4612]: I1203 07:49:43.129924 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" path="/var/lib/kubelet/pods/5d61d1d6-8fdd-4349-bae7-c7123aa58742/volumes" Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.136358 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.137558 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.137686 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.138407 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.138525 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14" gracePeriod=600 Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.933297 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14" exitCode=0 Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.933386 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14"} Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.933817 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"} Dec 03 07:49:47 crc kubenswrapper[4612]: I1203 07:49:47.933844 4612 scope.go:117] "RemoveContainer" containerID="f57887f822733dc51bac24ab820b117594d02a8eaa928e58dcd6bce042c04fbf" Dec 03 07:49:53 crc kubenswrapper[4612]: I1203 07:49:53.997799 4612 generic.go:334] "Generic (PLEG): container finished" podID="08764b2d-6ed4-4495-8338-03d2af8dcbdd" containerID="2b460d9f6a91bc0d1427a965eed82b2f04eff122a8dd35d3c4683a8180e78519" exitCode=0 Dec 03 07:49:53 crc kubenswrapper[4612]: I1203 07:49:53.997834 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"08764b2d-6ed4-4495-8338-03d2af8dcbdd","Type":"ContainerDied","Data":"2b460d9f6a91bc0d1427a965eed82b2f04eff122a8dd35d3c4683a8180e78519"} Dec 03 07:49:54 crc kubenswrapper[4612]: I1203 07:49:54.000170 4612 generic.go:334] "Generic (PLEG): container finished" podID="6ed2435b-f44d-4468-baec-035755359147" containerID="009b4f8ee21fcfad93e42ed9dcf8d40b28908e88bb6ac02721624f56f6b8abe6" exitCode=0 Dec 03 07:49:54 crc kubenswrapper[4612]: I1203 07:49:54.000198 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6ed2435b-f44d-4468-baec-035755359147","Type":"ContainerDied","Data":"009b4f8ee21fcfad93e42ed9dcf8d40b28908e88bb6ac02721624f56f6b8abe6"} Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.064932 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"08764b2d-6ed4-4495-8338-03d2af8dcbdd","Type":"ContainerStarted","Data":"e89c95e1b03d9e2439edf0c3d049d22bc5ab07a3b10c82177b580711660d0722"} Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.066368 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.071863 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6ed2435b-f44d-4468-baec-035755359147","Type":"ContainerStarted","Data":"38fb93bae0124ca0dda8d954c8ab5c2b06496413a59fcad755d00b0a3cb19863"} Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.072669 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.102170 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.102152307 podStartE2EDuration="37.102152307s" podCreationTimestamp="2025-12-03 07:49:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:49:55.097195694 +0000 UTC m=+1358.270553104" watchObservedRunningTime="2025-12-03 07:49:55.102152307 +0000 UTC m=+1358.275509717" Dec 03 07:49:55 crc kubenswrapper[4612]: I1203 07:49:55.135450 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.13542618 podStartE2EDuration="37.13542618s" podCreationTimestamp="2025-12-03 07:49:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 07:49:55.128592161 +0000 UTC m=+1358.301949591" watchObservedRunningTime="2025-12-03 
07:49:55.13542618 +0000 UTC m=+1358.308783590" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.070316 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr"] Dec 03 07:49:58 crc kubenswrapper[4612]: E1203 07:49:58.071230 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="init" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071242 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="init" Dec 03 07:49:58 crc kubenswrapper[4612]: E1203 07:49:58.071262 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071268 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: E1203 07:49:58.071287 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="init" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071295 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="init" Dec 03 07:49:58 crc kubenswrapper[4612]: E1203 07:49:58.071303 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071308 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071488 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d61d1d6-8fdd-4349-bae7-c7123aa58742" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.071505 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e2cf39c-6b71-49e5-b776-708ca1bf19f7" containerName="dnsmasq-dns" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.072220 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.075773 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.076002 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.076292 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.106058 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr"] Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.112354 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.112489 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.112545 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr8c4\" (UniqueName: \"kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.112572 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.112914 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.214186 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.214542 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr8c4\" (UniqueName: \"kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.214583 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.214632 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.220784 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.225660 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.230831 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.241546 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr8c4\" (UniqueName: \"kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:58 crc kubenswrapper[4612]: I1203 07:49:58.389531 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:49:59 crc kubenswrapper[4612]: I1203 07:49:59.039183 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr"] Dec 03 07:49:59 crc kubenswrapper[4612]: W1203 07:49:59.040794 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18af4e51_62fc_4bba_8afd_c8b743e70852.slice/crio-b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd WatchSource:0}: Error finding container b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd: Status 404 returned error can't find the container with id b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd Dec 03 07:49:59 crc kubenswrapper[4612]: I1203 07:49:59.128361 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" event={"ID":"18af4e51-62fc-4bba-8afd-c8b743e70852","Type":"ContainerStarted","Data":"b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd"} Dec 03 07:50:09 crc kubenswrapper[4612]: I1203 07:50:09.072304 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 07:50:09 crc kubenswrapper[4612]: I1203 07:50:09.283186 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 07:50:10 crc kubenswrapper[4612]: I1203 07:50:10.270373 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" event={"ID":"18af4e51-62fc-4bba-8afd-c8b743e70852","Type":"ContainerStarted","Data":"026d523858f46a51d60bcfb53d66d816fdadd45f0e9587f4d4f95be282e716dc"} Dec 03 07:50:10 crc kubenswrapper[4612]: I1203 07:50:10.303594 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" podStartSLOduration=1.983065244 podStartE2EDuration="12.303562785s" podCreationTimestamp="2025-12-03 07:49:58 +0000 UTC" firstStartedPulling="2025-12-03 07:49:59.046174984 +0000 UTC m=+1362.219532384" lastFinishedPulling="2025-12-03 07:50:09.366672525 +0000 UTC m=+1372.540029925" observedRunningTime="2025-12-03 07:50:10.288329628 +0000 UTC m=+1373.461687028" watchObservedRunningTime="2025-12-03 07:50:10.303562785 +0000 UTC m=+1373.476920205" Dec 03 07:50:22 crc kubenswrapper[4612]: I1203 07:50:22.403778 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" event={"ID":"18af4e51-62fc-4bba-8afd-c8b743e70852","Type":"ContainerDied","Data":"026d523858f46a51d60bcfb53d66d816fdadd45f0e9587f4d4f95be282e716dc"} Dec 03 07:50:22 crc kubenswrapper[4612]: I1203 07:50:22.403745 4612 generic.go:334] "Generic (PLEG): container finished" podID="18af4e51-62fc-4bba-8afd-c8b743e70852" containerID="026d523858f46a51d60bcfb53d66d816fdadd45f0e9587f4d4f95be282e716dc" exitCode=0 Dec 03 07:50:23 crc kubenswrapper[4612]: I1203 07:50:23.865120 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.009776 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory\") pod \"18af4e51-62fc-4bba-8afd-c8b743e70852\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.009850 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key\") pod \"18af4e51-62fc-4bba-8afd-c8b743e70852\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.009880 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle\") pod \"18af4e51-62fc-4bba-8afd-c8b743e70852\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.010149 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr8c4\" (UniqueName: \"kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4\") pod \"18af4e51-62fc-4bba-8afd-c8b743e70852\" (UID: \"18af4e51-62fc-4bba-8afd-c8b743e70852\") " Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.015820 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4" (OuterVolumeSpecName: "kube-api-access-gr8c4") pod "18af4e51-62fc-4bba-8afd-c8b743e70852" (UID: "18af4e51-62fc-4bba-8afd-c8b743e70852"). InnerVolumeSpecName "kube-api-access-gr8c4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.032374 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "18af4e51-62fc-4bba-8afd-c8b743e70852" (UID: "18af4e51-62fc-4bba-8afd-c8b743e70852"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.041053 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory" (OuterVolumeSpecName: "inventory") pod "18af4e51-62fc-4bba-8afd-c8b743e70852" (UID: "18af4e51-62fc-4bba-8afd-c8b743e70852"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.047450 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18af4e51-62fc-4bba-8afd-c8b743e70852" (UID: "18af4e51-62fc-4bba-8afd-c8b743e70852"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.112772 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr8c4\" (UniqueName: \"kubernetes.io/projected/18af4e51-62fc-4bba-8afd-c8b743e70852-kube-api-access-gr8c4\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.112812 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.112824 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.112833 4612 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18af4e51-62fc-4bba-8afd-c8b743e70852-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.421960 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" event={"ID":"18af4e51-62fc-4bba-8afd-c8b743e70852","Type":"ContainerDied","Data":"b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd"} Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.422023 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9a3e82fc9b9c84ec488234fe1fa9c8a76a0508fdd84061a1eb6d3eb4e726efd" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.422054 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.544186 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck"] Dec 03 07:50:24 crc kubenswrapper[4612]: E1203 07:50:24.544913 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18af4e51-62fc-4bba-8afd-c8b743e70852" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.544954 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="18af4e51-62fc-4bba-8afd-c8b743e70852" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.545192 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="18af4e51-62fc-4bba-8afd-c8b743e70852" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.545971 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.548342 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.548572 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.549252 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.550284 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.557025 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck"] Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.725901 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.726421 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hpv2\" (UniqueName: \"kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.726536 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.827928 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hpv2\" (UniqueName: \"kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.828016 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.828089 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.833376 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.842676 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.854968 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hpv2\" (UniqueName: \"kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-qvcck\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:24 crc kubenswrapper[4612]: I1203 07:50:24.863581 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:25 crc kubenswrapper[4612]: I1203 07:50:25.409196 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck"] Dec 03 07:50:25 crc kubenswrapper[4612]: W1203 07:50:25.424183 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41c7b385_5686_4d88_a86a_072eb493e1a2.slice/crio-6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a WatchSource:0}: Error finding container 6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a: Status 404 returned error can't find the container with id 6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a Dec 03 07:50:25 crc kubenswrapper[4612]: I1203 07:50:25.434732 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" event={"ID":"41c7b385-5686-4d88-a86a-072eb493e1a2","Type":"ContainerStarted","Data":"6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a"} Dec 03 07:50:26 crc kubenswrapper[4612]: I1203 07:50:26.445407 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" event={"ID":"41c7b385-5686-4d88-a86a-072eb493e1a2","Type":"ContainerStarted","Data":"b2992d046d4efc41eac38d275b4aa2a1fe8a6ce73ff9d2e9157008ebfec267b4"} Dec 03 07:50:26 crc kubenswrapper[4612]: I1203 07:50:26.466574 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" podStartSLOduration=2.010009794 podStartE2EDuration="2.466554364s" podCreationTimestamp="2025-12-03 07:50:24 +0000 UTC" firstStartedPulling="2025-12-03 07:50:25.427620541 +0000 UTC m=+1388.600977941" lastFinishedPulling="2025-12-03 07:50:25.884165111 +0000 UTC m=+1389.057522511" observedRunningTime="2025-12-03 07:50:26.457030279 +0000 UTC m=+1389.630387679" watchObservedRunningTime="2025-12-03 07:50:26.466554364 +0000 UTC 
m=+1389.639911764" Dec 03 07:50:29 crc kubenswrapper[4612]: I1203 07:50:29.475725 4612 generic.go:334] "Generic (PLEG): container finished" podID="41c7b385-5686-4d88-a86a-072eb493e1a2" containerID="b2992d046d4efc41eac38d275b4aa2a1fe8a6ce73ff9d2e9157008ebfec267b4" exitCode=0 Dec 03 07:50:29 crc kubenswrapper[4612]: I1203 07:50:29.475825 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" event={"ID":"41c7b385-5686-4d88-a86a-072eb493e1a2","Type":"ContainerDied","Data":"b2992d046d4efc41eac38d275b4aa2a1fe8a6ce73ff9d2e9157008ebfec267b4"} Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.071406 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.259902 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory\") pod \"41c7b385-5686-4d88-a86a-072eb493e1a2\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.260327 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key\") pod \"41c7b385-5686-4d88-a86a-072eb493e1a2\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.260428 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hpv2\" (UniqueName: \"kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2\") pod \"41c7b385-5686-4d88-a86a-072eb493e1a2\" (UID: \"41c7b385-5686-4d88-a86a-072eb493e1a2\") " Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.266333 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2" (OuterVolumeSpecName: "kube-api-access-5hpv2") pod "41c7b385-5686-4d88-a86a-072eb493e1a2" (UID: "41c7b385-5686-4d88-a86a-072eb493e1a2"). InnerVolumeSpecName "kube-api-access-5hpv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.290994 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "41c7b385-5686-4d88-a86a-072eb493e1a2" (UID: "41c7b385-5686-4d88-a86a-072eb493e1a2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.309350 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory" (OuterVolumeSpecName: "inventory") pod "41c7b385-5686-4d88-a86a-072eb493e1a2" (UID: "41c7b385-5686-4d88-a86a-072eb493e1a2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.363758 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.363798 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hpv2\" (UniqueName: \"kubernetes.io/projected/41c7b385-5686-4d88-a86a-072eb493e1a2-kube-api-access-5hpv2\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.363812 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41c7b385-5686-4d88-a86a-072eb493e1a2-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.501757 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" event={"ID":"41c7b385-5686-4d88-a86a-072eb493e1a2","Type":"ContainerDied","Data":"6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a"} Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.501797 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c00ac7c8d2043695fd8914df917454d24942d21c1b56737a9395f42c24f179a" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.501852 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-qvcck" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.674904 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8"] Dec 03 07:50:31 crc kubenswrapper[4612]: E1203 07:50:31.676428 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c7b385-5686-4d88-a86a-072eb493e1a2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.676464 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c7b385-5686-4d88-a86a-072eb493e1a2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.676987 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c7b385-5686-4d88-a86a-072eb493e1a2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.678073 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.681894 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.685271 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.685403 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.685502 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.704721 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8"] Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.778423 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.778674 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.778757 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw8db\" (UniqueName: \"kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.778919 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.880611 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.880674 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw8db\" (UniqueName: \"kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.880750 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.880815 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.885300 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.886410 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.886733 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:31 crc kubenswrapper[4612]: I1203 07:50:31.899751 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw8db\" (UniqueName: \"kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:32 crc kubenswrapper[4612]: I1203 07:50:32.009575 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:50:32 crc kubenswrapper[4612]: I1203 07:50:32.586839 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8"] Dec 03 07:50:33 crc kubenswrapper[4612]: I1203 07:50:33.523359 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" event={"ID":"8d491f1b-5bbf-4508-8ddc-2e986613d792","Type":"ContainerStarted","Data":"cad5965581b81962404ddc7ca3fc618dc7e6d1548dcfb868371785f7af51b27e"} Dec 03 07:50:34 crc kubenswrapper[4612]: I1203 07:50:34.532549 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" event={"ID":"8d491f1b-5bbf-4508-8ddc-2e986613d792","Type":"ContainerStarted","Data":"2497be3c0ee801b7ab7ea6f2dea4322e3a43f074f18888452792ca4bb2befd20"} Dec 03 07:50:34 crc kubenswrapper[4612]: I1203 07:50:34.557396 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" podStartSLOduration=2.879529262 podStartE2EDuration="3.557374255s" podCreationTimestamp="2025-12-03 07:50:31 +0000 UTC" firstStartedPulling="2025-12-03 07:50:32.600023528 +0000 UTC m=+1395.773380938" lastFinishedPulling="2025-12-03 07:50:33.277868531 +0000 UTC m=+1396.451225931" observedRunningTime="2025-12-03 07:50:34.552308439 +0000 UTC m=+1397.725665869" watchObservedRunningTime="2025-12-03 07:50:34.557374255 +0000 UTC m=+1397.730731655" Dec 03 07:50:43 crc kubenswrapper[4612]: I1203 07:50:43.190403 4612 scope.go:117] "RemoveContainer" containerID="99042361a5043bc8cb76409151afbb098a169d6024827d41c6c9f7e3f21d3361" Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.733633 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ct4c4"] Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.740822 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.766681 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ct4c4"] Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.907879 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgdpk\" (UniqueName: \"kubernetes.io/projected/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-kube-api-access-zgdpk\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.907926 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-catalog-content\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:39 crc kubenswrapper[4612]: I1203 07:51:39.907996 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-utilities\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.010080 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgdpk\" (UniqueName: \"kubernetes.io/projected/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-kube-api-access-zgdpk\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.010127 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-catalog-content\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.010159 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-utilities\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.010845 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-catalog-content\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.010851 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-utilities\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.028677 4612 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zgdpk\" (UniqueName: \"kubernetes.io/projected/6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619-kube-api-access-zgdpk\") pod \"certified-operators-ct4c4\" (UID: \"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619\") " pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.064295 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:51:40 crc kubenswrapper[4612]: I1203 07:51:40.506696 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ct4c4"] Dec 03 07:51:41 crc kubenswrapper[4612]: I1203 07:51:41.226468 4612 generic.go:334] "Generic (PLEG): container finished" podID="6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619" containerID="5613ce03b41930cd8ccefa6a6f9324a7d8da55f8376add706b204d98abff8aa7" exitCode=0 Dec 03 07:51:41 crc kubenswrapper[4612]: I1203 07:51:41.226830 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ct4c4" event={"ID":"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619","Type":"ContainerDied","Data":"5613ce03b41930cd8ccefa6a6f9324a7d8da55f8376add706b204d98abff8aa7"} Dec 03 07:51:41 crc kubenswrapper[4612]: I1203 07:51:41.226859 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ct4c4" event={"ID":"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619","Type":"ContainerStarted","Data":"6fd96818852b8d93da47ca633215765e83586d9dc0a3d4161ca0cc1a2e7ade53"} Dec 03 07:51:43 crc kubenswrapper[4612]: I1203 07:51:43.285593 4612 scope.go:117] "RemoveContainer" containerID="e975ac6a0d365acd57746c87807445d16de1ad7a5dc638e4bceb129d51a5c361" Dec 03 07:51:47 crc kubenswrapper[4612]: I1203 07:51:47.135716 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 07:51:47 crc kubenswrapper[4612]: I1203 07:51:47.136297 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 07:51:47 crc kubenswrapper[4612]: I1203 07:51:47.291126 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ct4c4" event={"ID":"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619","Type":"ContainerStarted","Data":"a40be0d80e950a9c3530043485946cb53823d615074d5873debf5ac2484ad357"} Dec 03 07:51:49 crc kubenswrapper[4612]: I1203 07:51:49.308802 4612 generic.go:334] "Generic (PLEG): container finished" podID="6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619" containerID="a40be0d80e950a9c3530043485946cb53823d615074d5873debf5ac2484ad357" exitCode=0 Dec 03 07:51:49 crc kubenswrapper[4612]: I1203 07:51:49.309907 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ct4c4" event={"ID":"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619","Type":"ContainerDied","Data":"a40be0d80e950a9c3530043485946cb53823d615074d5873debf5ac2484ad357"} Dec 03 07:51:51 crc kubenswrapper[4612]: I1203 07:51:51.329960 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-ct4c4" event={"ID":"6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619","Type":"ContainerStarted","Data":"30f3b5ad97dac6c173877e5039af09b587d6f278c8669506e4f0ca57acfde633"} Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.065460 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.065892 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.113194 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.130383 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ct4c4" podStartSLOduration=12.290929062 podStartE2EDuration="21.130359966s" podCreationTimestamp="2025-12-03 07:51:39 +0000 UTC" firstStartedPulling="2025-12-03 07:51:41.228097824 +0000 UTC m=+1464.401455224" lastFinishedPulling="2025-12-03 07:51:50.067528728 +0000 UTC m=+1473.240886128" observedRunningTime="2025-12-03 07:51:51.355635703 +0000 UTC m=+1474.528993143" watchObservedRunningTime="2025-12-03 07:52:00.130359966 +0000 UTC m=+1483.303717366" Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.494018 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ct4c4" Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.572828 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ct4c4"] Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.620335 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:52:00 crc kubenswrapper[4612]: I1203 07:52:00.620594 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6hqst" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="registry-server" containerID="cri-o://1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" gracePeriod=2 Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.010933 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f is running failed: container process not found" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.011634 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f is running failed: container process not found" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.012005 4612 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f is running failed: container process not found" 
containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.012074 4612 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-6hqst" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="registry-server" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.073092 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.242705 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content\") pod \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.242905 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpg9l\" (UniqueName: \"kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l\") pod \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.242981 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities\") pod \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\" (UID: \"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd\") " Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.247093 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities" (OuterVolumeSpecName: "utilities") pod "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" (UID: "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.249028 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l" (OuterVolumeSpecName: "kube-api-access-hpg9l") pod "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" (UID: "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd"). InnerVolumeSpecName "kube-api-access-hpg9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.304456 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" (UID: "f93ba0d0-905d-4b21-bb11-30fcf92ed7bd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.346008 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.346057 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.346069 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpg9l\" (UniqueName: \"kubernetes.io/projected/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd-kube-api-access-hpg9l\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.443515 4612 generic.go:334] "Generic (PLEG): container finished" podID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" exitCode=0 Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.443583 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hqst" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.443629 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerDied","Data":"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f"} Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.443657 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hqst" event={"ID":"f93ba0d0-905d-4b21-bb11-30fcf92ed7bd","Type":"ContainerDied","Data":"8a8795cb491aba6bc65bfa8e9b6b94ca58864a353228b11dc8c82cbc5b31a4aa"} Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.443673 4612 scope.go:117] "RemoveContainer" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.467413 4612 scope.go:117] "RemoveContainer" containerID="61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.486498 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.500989 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6hqst"] Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.502417 4612 scope.go:117] "RemoveContainer" containerID="27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.539089 4612 scope.go:117] "RemoveContainer" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.539520 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f\": container with ID starting with 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f not found: ID does not exist" containerID="1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.539552 
4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f"} err="failed to get container status \"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f\": rpc error: code = NotFound desc = could not find container \"1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f\": container with ID starting with 1f1bc0fa12aa2e0810ca0cdc803bf0513f9812c36d010b247b3e6d53e2dd3f5f not found: ID does not exist" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.539571 4612 scope.go:117] "RemoveContainer" containerID="61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3" Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.539780 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3\": container with ID starting with 61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3 not found: ID does not exist" containerID="61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.539802 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3"} err="failed to get container status \"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3\": rpc error: code = NotFound desc = could not find container \"61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3\": container with ID starting with 61b5fa07214e07e4f9773be0f48a511cfa4ac4cb2ed5c4b917fb4bb5a31184a3 not found: ID does not exist" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.539815 4612 scope.go:117] "RemoveContainer" containerID="27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865" Dec 03 07:52:01 crc kubenswrapper[4612]: E1203 07:52:01.539987 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865\": container with ID starting with 27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865 not found: ID does not exist" containerID="27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865" Dec 03 07:52:01 crc kubenswrapper[4612]: I1203 07:52:01.540003 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865"} err="failed to get container status \"27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865\": rpc error: code = NotFound desc = could not find container \"27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865\": container with ID starting with 27a4b321f045e209c43d532d4f5523c3335411ee5d97da905775080df126a865 not found: ID does not exist" Dec 03 07:52:03 crc kubenswrapper[4612]: I1203 07:52:03.101052 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" path="/var/lib/kubelet/pods/f93ba0d0-905d-4b21-bb11-30fcf92ed7bd/volumes" Dec 03 07:52:17 crc kubenswrapper[4612]: I1203 07:52:17.135783 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
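The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triples above are benign: PLEG already saw the container and its sandbox die, CRI-O already discarded them, and the later housekeeping pass finds nothing left to delete. The usual way to absorb this race when driving a CRI-style gRPC API is to treat NotFound as success; a hedged sketch follows (removeIfPresent and the remover type are made-up names for illustration, not kubelet code):

    // cleanup.go - a sketch of idempotent container removal: a NotFound
    // from the runtime means the container is already gone, which is the
    // outcome the caller wanted, so it is not treated as an error.
    package cleanup

    import (
    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // remover abstracts whatever CRI client call actually deletes a container.
    type remover func(containerID string) error

    func removeIfPresent(rm remover, containerID string) error {
    	err := rm(containerID)
    	if status.Code(err) == codes.NotFound {
    		// Already removed by an earlier cleanup pass, as in the
    		// log records above; deletion is idempotent.
    		return nil
    	}
    	return err
    }

The kubelet reaches the same end state here: it logs the NotFound at error level but proceeds, and two seconds later the orphaned volumes directory is cleaned up.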
127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:52:17 crc kubenswrapper[4612]: I1203 07:52:17.136289 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.670504 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"]
Dec 03 07:52:36 crc kubenswrapper[4612]: E1203 07:52:36.671582 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="extract-utilities"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.671598 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="extract-utilities"
Dec 03 07:52:36 crc kubenswrapper[4612]: E1203 07:52:36.671636 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="extract-content"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.671644 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="extract-content"
Dec 03 07:52:36 crc kubenswrapper[4612]: E1203 07:52:36.671660 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="registry-server"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.671669 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="registry-server"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.671884 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f93ba0d0-905d-4b21-bb11-30fcf92ed7bd" containerName="registry-server"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.673470 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6m4m"
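The RemoveStaleState burst fires when the ADD for redhat-operators-v6m4m reaches the resource managers: the CPU and memory managers still hold per-container assignments for the just-deleted certified-operators-6hqst pod (f93ba0d0-...), and purge them before admitting the new pod. Despite the E (error) severity on the cpu_manager lines, each is paired with an I-level "Deleted CPUSet assignment" confirmation, so this reads as routine state housekeeping rather than a fault.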
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.678508 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"]
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.793723 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.793804 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.793963 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.895209 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.895524 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.895626 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.895820 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.896006 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:36 crc kubenswrapper[4612]: I1203 07:52:36.920130 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m"
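Volume setup above follows the reconciler's usual three stages per volume: operationExecutor.VerifyControllerAttachedVolume (confirming the volume is usable on this node), operationExecutor.MountVolume started, and MountVolume.SetUp succeeded. The two emptyDir volumes (utilities, catalog-content) complete within a millisecond, while the projected service-account token volume (kube-api-access-l9xx2) trails by roughly 25ms, plausibly because the token must be minted and written out before the mount can be reported done.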
\"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") pod \"redhat-operators-v6m4m\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") " pod="openshift-marketplace/redhat-operators-v6m4m" Dec 03 07:52:37 crc kubenswrapper[4612]: I1203 07:52:37.003008 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6m4m" Dec 03 07:52:37 crc kubenswrapper[4612]: I1203 07:52:37.532975 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"] Dec 03 07:52:37 crc kubenswrapper[4612]: I1203 07:52:37.826767 4612 generic.go:334] "Generic (PLEG): container finished" podID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerID="aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a" exitCode=0 Dec 03 07:52:37 crc kubenswrapper[4612]: I1203 07:52:37.826818 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerDied","Data":"aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a"} Dec 03 07:52:37 crc kubenswrapper[4612]: I1203 07:52:37.827024 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerStarted","Data":"c5c283e0830bbd41a2c4b95c622b07c5f273f44f387e468de71637df42d4539a"} Dec 03 07:52:38 crc kubenswrapper[4612]: I1203 07:52:38.836983 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerStarted","Data":"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"} Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.364624 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.368068 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.398039 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.510524 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87qtn\" (UniqueName: \"kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.510715 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.510818 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.613307 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.612551 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.613462 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.613924 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.614323 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87qtn\" (UniqueName: \"kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.637124 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-87qtn\" (UniqueName: \"kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn\") pod \"redhat-marketplace-8glvt\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.689221 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.892516 4612 generic.go:334] "Generic (PLEG): container finished" podID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerID="5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344" exitCode=0 Dec 03 07:52:42 crc kubenswrapper[4612]: I1203 07:52:42.892827 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerDied","Data":"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"} Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.134492 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:43 crc kubenswrapper[4612]: W1203 07:52:43.141310 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43345f05_916f_4417_aa70_b781e4da082e.slice/crio-cb354f95f4f4133fe54e8d3b1cc8b6b89fadf309777b1c6f3139c05dd144e456 WatchSource:0}: Error finding container cb354f95f4f4133fe54e8d3b1cc8b6b89fadf309777b1c6f3139c05dd144e456: Status 404 returned error can't find the container with id cb354f95f4f4133fe54e8d3b1cc8b6b89fadf309777b1c6f3139c05dd144e456 Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.373509 4612 scope.go:117] "RemoveContainer" containerID="3a4b20c303e1f22cc2642f57344a45df151bb467d5f19e0bc5e382d3a18c6c62" Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.409805 4612 scope.go:117] "RemoveContainer" containerID="c13073aeadea899f842d6536a2fdbbdb1ff96e3954511c5249ed62939427b063" Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.902482 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerStarted","Data":"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"} Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.903761 4612 generic.go:334] "Generic (PLEG): container finished" podID="43345f05-916f-4417-aa70-b781e4da082e" containerID="690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612" exitCode=0 Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.903797 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerDied","Data":"690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612"} Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.903818 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerStarted","Data":"cb354f95f4f4133fe54e8d3b1cc8b6b89fadf309777b1c6f3139c05dd144e456"} Dec 03 07:52:43 crc kubenswrapper[4612]: I1203 07:52:43.927902 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v6m4m" 
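The startup-latency records are internally consistent if podStartSLOduration is read as the end-to-end startup time minus the image-pull window. For redhat-operators-v6m4m: pull window = 07:52:43.38723108 - 07:52:37.828985341 = 5.558245739s, and 7.927884721s - 5.558245739s = 2.369638982s, exactly the reported podStartSLOduration. The same holds for certified-operators-ct4c4 earlier: 21.130359966s - (07:51:50.067528728 - 07:51:41.228097824 = 8.839430904s) = 12.290929062s.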
Dec 03 07:52:44 crc kubenswrapper[4612]: I1203 07:52:44.914867 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerStarted","Data":"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7"}
Dec 03 07:52:45 crc kubenswrapper[4612]: I1203 07:52:45.928316 4612 generic.go:334] "Generic (PLEG): container finished" podID="43345f05-916f-4417-aa70-b781e4da082e" containerID="35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7" exitCode=0
Dec 03 07:52:45 crc kubenswrapper[4612]: I1203 07:52:45.928367 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerDied","Data":"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7"}
Dec 03 07:52:46 crc kubenswrapper[4612]: I1203 07:52:46.938502 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerStarted","Data":"507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44"}
Dec 03 07:52:46 crc kubenswrapper[4612]: I1203 07:52:46.961401 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8glvt" podStartSLOduration=2.461880716 podStartE2EDuration="4.96136983s" podCreationTimestamp="2025-12-03 07:52:42 +0000 UTC" firstStartedPulling="2025-12-03 07:52:43.905756883 +0000 UTC m=+1527.079114273" lastFinishedPulling="2025-12-03 07:52:46.405245987 +0000 UTC m=+1529.578603387" observedRunningTime="2025-12-03 07:52:46.957244548 +0000 UTC m=+1530.130601948" watchObservedRunningTime="2025-12-03 07:52:46.96136983 +0000 UTC m=+1530.134727230"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.003843 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.003905 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.135927 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.136009 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.136057 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.136556 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.136618 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" gracePeriod=600
Dec 03 07:52:47 crc kubenswrapper[4612]: E1203 07:52:47.494395 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.953643 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" exitCode=0
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.953850 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"}
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.954028 4612 scope.go:117] "RemoveContainer" containerID="7105508fd7187feb30bdf8f839f0a6f2f8652223659d23672f28b78428b4cb14"
Dec 03 07:52:47 crc kubenswrapper[4612]: I1203 07:52:47.954963 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:52:47 crc kubenswrapper[4612]: E1203 07:52:47.955313 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:52:48 crc kubenswrapper[4612]: I1203 07:52:48.051883 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v6m4m" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="registry-server" probeResult="failure" output=<
Dec 03 07:52:48 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s
Dec 03 07:52:48 crc kubenswrapper[4612]: >
Dec 03 07:52:52 crc kubenswrapper[4612]: I1203 07:52:52.689415 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8glvt"
Dec 03 07:52:52 crc kubenswrapper[4612]: I1203 07:52:52.690014 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8glvt"
(probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:52 crc kubenswrapper[4612]: I1203 07:52:52.740387 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:53 crc kubenswrapper[4612]: I1203 07:52:53.085727 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:53 crc kubenswrapper[4612]: I1203 07:52:53.154034 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.026787 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8glvt" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="registry-server" containerID="cri-o://507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44" gracePeriod=2 Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.522518 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.679724 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities\") pod \"43345f05-916f-4417-aa70-b781e4da082e\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.679920 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content\") pod \"43345f05-916f-4417-aa70-b781e4da082e\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.679972 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87qtn\" (UniqueName: \"kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn\") pod \"43345f05-916f-4417-aa70-b781e4da082e\" (UID: \"43345f05-916f-4417-aa70-b781e4da082e\") " Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.680858 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities" (OuterVolumeSpecName: "utilities") pod "43345f05-916f-4417-aa70-b781e4da082e" (UID: "43345f05-916f-4417-aa70-b781e4da082e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.694099 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn" (OuterVolumeSpecName: "kube-api-access-87qtn") pod "43345f05-916f-4417-aa70-b781e4da082e" (UID: "43345f05-916f-4417-aa70-b781e4da082e"). InnerVolumeSpecName "kube-api-access-87qtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.701048 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43345f05-916f-4417-aa70-b781e4da082e" (UID: "43345f05-916f-4417-aa70-b781e4da082e"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.782567 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.782610 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43345f05-916f-4417-aa70-b781e4da082e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:55 crc kubenswrapper[4612]: I1203 07:52:55.782625 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87qtn\" (UniqueName: \"kubernetes.io/projected/43345f05-916f-4417-aa70-b781e4da082e-kube-api-access-87qtn\") on node \"crc\" DevicePath \"\"" Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.037272 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8glvt" Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.037300 4612 generic.go:334] "Generic (PLEG): container finished" podID="43345f05-916f-4417-aa70-b781e4da082e" containerID="507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44" exitCode=0 Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.037275 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerDied","Data":"507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44"} Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.037373 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8glvt" event={"ID":"43345f05-916f-4417-aa70-b781e4da082e","Type":"ContainerDied","Data":"cb354f95f4f4133fe54e8d3b1cc8b6b89fadf309777b1c6f3139c05dd144e456"} Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.037394 4612 scope.go:117] "RemoveContainer" containerID="507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44" Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.061430 4612 scope.go:117] "RemoveContainer" containerID="35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7" Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.076836 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.085150 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8glvt"] Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.087872 4612 scope.go:117] "RemoveContainer" containerID="690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612" Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.128093 4612 scope.go:117] "RemoveContainer" containerID="507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44" Dec 03 07:52:56 crc kubenswrapper[4612]: E1203 07:52:56.128555 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44\": container with ID starting with 507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44 not found: ID does not exist" containerID="507176d03e1704234db587d31b97f9de5937cc16c5482a9a0386f63100058f44" Dec 03 07:52:56 crc 
Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.128636 4612 scope.go:117] "RemoveContainer" containerID="35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7"
Dec 03 07:52:56 crc kubenswrapper[4612]: E1203 07:52:56.129070 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7\": container with ID starting with 35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7 not found: ID does not exist" containerID="35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7"
Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.129091 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7"} err="failed to get container status \"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7\": rpc error: code = NotFound desc = could not find container \"35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7\": container with ID starting with 35a4531a24f19fe78bb22aa2857795d89d79d11300e097343f34c543aea1cdf7 not found: ID does not exist"
Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.129105 4612 scope.go:117] "RemoveContainer" containerID="690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612"
Dec 03 07:52:56 crc kubenswrapper[4612]: E1203 07:52:56.129385 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612\": container with ID starting with 690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612 not found: ID does not exist" containerID="690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612"
Dec 03 07:52:56 crc kubenswrapper[4612]: I1203 07:52:56.129417 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612"} err="failed to get container status \"690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612\": rpc error: code = NotFound desc = could not find container \"690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612\": container with ID starting with 690bd828c84a36dbcc78a33ecf2f2479854bcab7041aa1037503e5b8436bf612 not found: ID does not exist"
Dec 03 07:52:57 crc kubenswrapper[4612]: I1203 07:52:57.116561 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43345f05-916f-4417-aa70-b781e4da082e" path="/var/lib/kubelet/pods/43345f05-916f-4417-aa70-b781e4da082e/volumes"
Dec 03 07:52:57 crc kubenswrapper[4612]: I1203 07:52:57.119098 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:57 crc kubenswrapper[4612]: I1203 07:52:57.197108 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:58 crc kubenswrapper[4612]: I1203 07:52:58.384897 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"]
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.075834 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v6m4m" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="registry-server" containerID="cri-o://04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09" gracePeriod=2
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.553251 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.575825 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content\") pod \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") "
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.576013 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities\") pod \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") "
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.576068 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") pod \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\" (UID: \"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f\") "
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.576734 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities" (OuterVolumeSpecName: "utilities") pod "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" (UID: "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.581095 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2" (OuterVolumeSpecName: "kube-api-access-l9xx2") pod "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" (UID: "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f"). InnerVolumeSpecName "kube-api-access-l9xx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.678797 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.679157 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9xx2\" (UniqueName: \"kubernetes.io/projected/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-kube-api-access-l9xx2\") on node \"crc\" DevicePath \"\""
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.703372 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" (UID: "ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 07:52:59 crc kubenswrapper[4612]: I1203 07:52:59.781932 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.098940 4612 generic.go:334] "Generic (PLEG): container finished" podID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerID="04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09" exitCode=0
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.099005 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerDied","Data":"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"}
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.099075 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6m4m" event={"ID":"ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f","Type":"ContainerDied","Data":"c5c283e0830bbd41a2c4b95c622b07c5f273f44f387e468de71637df42d4539a"}
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.099107 4612 scope.go:117] "RemoveContainer" containerID="04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.099243 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6m4m"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.148697 4612 scope.go:117] "RemoveContainer" containerID="5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.155726 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"]
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.167690 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v6m4m"]
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.178254 4612 scope.go:117] "RemoveContainer" containerID="aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.232509 4612 scope.go:117] "RemoveContainer" containerID="04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"
Dec 03 07:53:00 crc kubenswrapper[4612]: E1203 07:53:00.233146 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09\": container with ID starting with 04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09 not found: ID does not exist" containerID="04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.233191 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09"} err="failed to get container status \"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09\": rpc error: code = NotFound desc = could not find container \"04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09\": container with ID starting with 04fa6013b847e107c8a387d600a0da52bd98613060b95abb406dc224642bad09 not found: ID does not exist"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.233226 4612 scope.go:117] "RemoveContainer" containerID="5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"
Dec 03 07:53:00 crc kubenswrapper[4612]: E1203 07:53:00.233844 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344\": container with ID starting with 5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344 not found: ID does not exist" containerID="5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.233884 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344"} err="failed to get container status \"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344\": rpc error: code = NotFound desc = could not find container \"5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344\": container with ID starting with 5bec4bf730d1cfe4207b9f4b1e83a52dabbc946765bb26760eb6099ede93b344 not found: ID does not exist"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.233927 4612 scope.go:117] "RemoveContainer" containerID="aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a"
Dec 03 07:53:00 crc kubenswrapper[4612]: E1203 07:53:00.234346 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a\": container with ID starting with aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a not found: ID does not exist" containerID="aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a"
Dec 03 07:53:00 crc kubenswrapper[4612]: I1203 07:53:00.234394 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a"} err="failed to get container status \"aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a\": rpc error: code = NotFound desc = could not find container \"aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a\": container with ID starting with aae0e109e6b04290bf85faa6f8174d1ae35316d1b242567a82aeccb47aea632a not found: ID does not exist"
Dec 03 07:53:01 crc kubenswrapper[4612]: I1203 07:53:01.091037 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:53:01 crc kubenswrapper[4612]: E1203 07:53:01.091453 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:53:01 crc kubenswrapper[4612]: I1203 07:53:01.125829 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" path="/var/lib/kubelet/pods/ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f/volumes"
Dec 03 07:53:15 crc kubenswrapper[4612]: I1203 07:53:15.089816 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:53:15 crc kubenswrapper[4612]: E1203 07:53:15.090671 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:53:26 crc kubenswrapper[4612]: I1203 07:53:26.089799 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:53:26 crc kubenswrapper[4612]: E1203 07:53:26.090549 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:53:38 crc kubenswrapper[4612]: I1203 07:53:38.089764 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:53:38 crc kubenswrapper[4612]: E1203 07:53:38.090754 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:53:53 crc kubenswrapper[4612]: I1203 07:53:53.089970 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:53:53 crc kubenswrapper[4612]: E1203 07:53:53.090996 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:54:06 crc kubenswrapper[4612]: I1203 07:54:06.090156 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:54:06 crc kubenswrapper[4612]: E1203 07:54:06.090903 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:54:08 crc kubenswrapper[4612]: I1203 07:54:08.046871 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-cbc4-account-create-update-whdk7"]
Dec 03 07:54:08 crc kubenswrapper[4612]: I1203 07:54:08.055456 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-c7fgx"]
Dec 03 07:54:08 crc kubenswrapper[4612]: I1203 07:54:08.064465 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-cbc4-account-create-update-whdk7"]
Dec 03 07:54:08 crc kubenswrapper[4612]: I1203 07:54:08.072318 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-c7fgx"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.036001 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-bf84-account-create-update-jw9wr"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.044889 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7356-account-create-update-4kqnh"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.056031 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-tkm2k"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.066299 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-47bvp"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.073735 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-bf84-account-create-update-jw9wr"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.080301 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-tkm2k"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.086575 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-47bvp"]
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.100249 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54" path="/var/lib/kubelet/pods/13e31ac1-0e50-42bb-9b4a-8c1c76a8ca54/volumes"
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.100917 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45a224b3-8904-49fe-a237-6a8d3b2755eb" path="/var/lib/kubelet/pods/45a224b3-8904-49fe-a237-6a8d3b2755eb/volumes"
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.102412 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc" path="/var/lib/kubelet/pods/7a14842a-1ab6-4951-bfad-7bfaa5c8f2bc/volumes"
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.103732 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2201704-0a33-48bd-933a-879d56b8e6e7" path="/var/lib/kubelet/pods/b2201704-0a33-48bd-933a-879d56b8e6e7/volumes"
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.106421 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaf141e2-8218-4144-b0f2-d4568f152c99" path="/var/lib/kubelet/pods/eaf141e2-8218-4144-b0f2-d4568f152c99/volumes"
Dec 03 07:54:09 crc kubenswrapper[4612]: I1203 07:54:09.107410 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7356-account-create-update-4kqnh"]
Dec 03 07:54:11 crc kubenswrapper[4612]: I1203 07:54:11.114366 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59e2290a-9814-42f9-acd7-ce36e42dd5e9" path="/var/lib/kubelet/pods/59e2290a-9814-42f9-acd7-ce36e42dd5e9/volumes"
Dec 03 07:54:19 crc kubenswrapper[4612]: I1203 07:54:19.089486 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:54:19 crc kubenswrapper[4612]: E1203 07:54:19.090405 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 07:54:34 crc kubenswrapper[4612]: I1203 07:54:34.018552 4612 generic.go:334] "Generic (PLEG): container finished" podID="8d491f1b-5bbf-4508-8ddc-2e986613d792" containerID="2497be3c0ee801b7ab7ea6f2dea4322e3a43f074f18888452792ca4bb2befd20" exitCode=0
Dec 03 07:54:34 crc kubenswrapper[4612]: I1203 07:54:34.018924 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" event={"ID":"8d491f1b-5bbf-4508-8ddc-2e986613d792","Type":"ContainerDied","Data":"2497be3c0ee801b7ab7ea6f2dea4322e3a43f074f18888452792ca4bb2befd20"}
Dec 03 07:54:34 crc kubenswrapper[4612]: I1203 07:54:34.090183 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b"
Dec 03 07:54:34 crc kubenswrapper[4612]: E1203 07:54:34.090513 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.546493 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.657115 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw8db\" (UniqueName: \"kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db\") pod \"8d491f1b-5bbf-4508-8ddc-2e986613d792\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.657451 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key\") pod \"8d491f1b-5bbf-4508-8ddc-2e986613d792\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.657554 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory\") pod \"8d491f1b-5bbf-4508-8ddc-2e986613d792\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.657632 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle\") pod \"8d491f1b-5bbf-4508-8ddc-2e986613d792\" (UID: \"8d491f1b-5bbf-4508-8ddc-2e986613d792\") " Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.662723 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db" (OuterVolumeSpecName: "kube-api-access-gw8db") pod "8d491f1b-5bbf-4508-8ddc-2e986613d792" (UID: "8d491f1b-5bbf-4508-8ddc-2e986613d792"). InnerVolumeSpecName "kube-api-access-gw8db". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.663071 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8d491f1b-5bbf-4508-8ddc-2e986613d792" (UID: "8d491f1b-5bbf-4508-8ddc-2e986613d792"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.688082 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8d491f1b-5bbf-4508-8ddc-2e986613d792" (UID: "8d491f1b-5bbf-4508-8ddc-2e986613d792"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.695671 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory" (OuterVolumeSpecName: "inventory") pod "8d491f1b-5bbf-4508-8ddc-2e986613d792" (UID: "8d491f1b-5bbf-4508-8ddc-2e986613d792"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.760447 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw8db\" (UniqueName: \"kubernetes.io/projected/8d491f1b-5bbf-4508-8ddc-2e986613d792-kube-api-access-gw8db\") on node \"crc\" DevicePath \"\"" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.760477 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.760485 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:54:35 crc kubenswrapper[4612]: I1203 07:54:35.760494 4612 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d491f1b-5bbf-4508-8ddc-2e986613d792-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.044826 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" event={"ID":"8d491f1b-5bbf-4508-8ddc-2e986613d792","Type":"ContainerDied","Data":"cad5965581b81962404ddc7ca3fc618dc7e6d1548dcfb868371785f7af51b27e"} Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.044867 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cad5965581b81962404ddc7ca3fc618dc7e6d1548dcfb868371785f7af51b27e" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.044932 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.053841 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-558a-account-create-update-rnmtc"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.072197 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-f2fmf"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.085999 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-rmxx5"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.101552 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-558a-account-create-update-rnmtc"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.111029 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-f2fmf"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.121429 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-rmxx5"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.130861 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-f5c8-account-create-update-qhpqt"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.140960 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-f5c8-account-create-update-qhpqt"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.174474 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv"] Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175127 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="extract-content" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175155 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="extract-content" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175192 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d491f1b-5bbf-4508-8ddc-2e986613d792" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175210 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d491f1b-5bbf-4508-8ddc-2e986613d792" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175226 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="extract-utilities" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175240 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="extract-utilities" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175267 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="extract-utilities" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175281 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="extract-utilities" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175304 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175316 4612 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175347 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="extract-content" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175359 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="extract-content" Dec 03 07:54:36 crc kubenswrapper[4612]: E1203 07:54:36.175381 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175391 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175698 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca3708d0-d5f6-4b52-aab1-9a5c8ea3026f" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175730 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d491f1b-5bbf-4508-8ddc-2e986613d792" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.175759 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="43345f05-916f-4417-aa70-b781e4da082e" containerName="registry-server" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.176672 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.182655 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv"] Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.207654 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.207709 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.207802 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.207962 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.268795 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qb4j\" (UniqueName: \"kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.268856 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.268997 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.370360 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qb4j\" (UniqueName: \"kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.370470 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.372410 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.375817 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.386039 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.390250 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qb4j\" (UniqueName: \"kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:36 crc kubenswrapper[4612]: I1203 07:54:36.522513 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.045122 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qzfmm"] Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.063624 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-4643-account-create-update-rv8kg"] Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.076663 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qzfmm"] Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.087308 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-4643-account-create-update-rv8kg"] Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.114052 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fb3251d-18b1-42f0-b048-3b7de91538a1" path="/var/lib/kubelet/pods/0fb3251d-18b1-42f0-b048-3b7de91538a1/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.114629 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.115237 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70d803ac-6da1-49ea-b048-aa20f0062c44" path="/var/lib/kubelet/pods/70d803ac-6da1-49ea-b048-aa20f0062c44/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.116213 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="768893ad-00ba-4b59-96ba-b7a078879dbe" path="/var/lib/kubelet/pods/768893ad-00ba-4b59-96ba-b7a078879dbe/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.117127 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c6a1d5-8062-4e6c-b12e-a44ed73f7038" path="/var/lib/kubelet/pods/91c6a1d5-8062-4e6c-b12e-a44ed73f7038/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.118504 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea00badd-5625-42e1-a6af-9fad9903385a" path="/var/lib/kubelet/pods/ea00badd-5625-42e1-a6af-9fad9903385a/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.119445 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb7ad4dc-78e8-4503-936c-dd76a4c73175" path="/var/lib/kubelet/pods/eb7ad4dc-78e8-4503-936c-dd76a4c73175/volumes" Dec 03 07:54:37 crc kubenswrapper[4612]: I1203 07:54:37.120168 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv"] Dec 03 07:54:38 crc kubenswrapper[4612]: I1203 07:54:38.063570 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" event={"ID":"8d6c8be7-a683-465a-9894-08b5ca61791e","Type":"ContainerStarted","Data":"1a81187634ebf74e4874f2f58587de8ff7663b4ab19bea285167eb627c686cf9"} Dec 03 07:54:38 crc kubenswrapper[4612]: I1203 07:54:38.063896 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" event={"ID":"8d6c8be7-a683-465a-9894-08b5ca61791e","Type":"ContainerStarted","Data":"5769ebcb6439a78d4417ed1adbcfa54317120281dde37758700e8461a978c0c1"} Dec 03 07:54:38 crc kubenswrapper[4612]: I1203 07:54:38.083144 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" podStartSLOduration=1.619556051 
podStartE2EDuration="2.083122786s" podCreationTimestamp="2025-12-03 07:54:36 +0000 UTC" firstStartedPulling="2025-12-03 07:54:37.114351117 +0000 UTC m=+1640.287708517" lastFinishedPulling="2025-12-03 07:54:37.577917842 +0000 UTC m=+1640.751275252" observedRunningTime="2025-12-03 07:54:38.078977873 +0000 UTC m=+1641.252335283" watchObservedRunningTime="2025-12-03 07:54:38.083122786 +0000 UTC m=+1641.256480186" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.557121 4612 scope.go:117] "RemoveContainer" containerID="7ced67c5e7adfccc3f27c6e295ffe3f36578e952ff34573fb64ec22c21c2b3ab" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.583286 4612 scope.go:117] "RemoveContainer" containerID="40611c7331b96df8812e898efe6b7c7c05259bb908a16c2a6cb38d29c7d6956d" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.650509 4612 scope.go:117] "RemoveContainer" containerID="5775852b47c2d79141158b14850e6be49fed7e207e5ef255aab7d76ef010ea89" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.693980 4612 scope.go:117] "RemoveContainer" containerID="b1c5bdb2d90adab8e54f15f74c762dc784eee615e00aa65c02d5d100d3cdf8e1" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.738217 4612 scope.go:117] "RemoveContainer" containerID="d39568f0f8b07f8b92cf961ea4bd9e26e8dd80551f62690944600c4ce063ec94" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.785877 4612 scope.go:117] "RemoveContainer" containerID="ebe163931d8867d7a4f7e1ad88c24b740368a5bcc3edf74ffb77e18c32453f1e" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.829425 4612 scope.go:117] "RemoveContainer" containerID="68a8c4a9d0cfbee2a5dc444e027d5c5545b26a60bbabf8cdb10022a177dd3a32" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.850431 4612 scope.go:117] "RemoveContainer" containerID="beb97f036dccaae439558c36c712c3cf061a3758093fd654ca39369e4a79e023" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.871053 4612 scope.go:117] "RemoveContainer" containerID="700f22cf690f526e04937c5429bb99b695e01abc93bbb544b68976370bd88a20" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.888503 4612 scope.go:117] "RemoveContainer" containerID="cea1d7f57747e96f9476e2e746819e45105bf76d2b170adef35ac08d2600c1c7" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.909722 4612 scope.go:117] "RemoveContainer" containerID="5f1f68d98042e9a5e9b48250509683a5bd9870bc539c66b3cffa943c8ef1dda6" Dec 03 07:54:43 crc kubenswrapper[4612]: I1203 07:54:43.934689 4612 scope.go:117] "RemoveContainer" containerID="2e51cd4b6e9f5b4b293be5b61d03d1bbce9f3344ef4fe9aec445cc1654c23a14" Dec 03 07:54:45 crc kubenswrapper[4612]: I1203 07:54:45.090197 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:54:45 crc kubenswrapper[4612]: E1203 07:54:45.091157 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:54:46 crc kubenswrapper[4612]: I1203 07:54:46.058068 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-wzkm4"] Dec 03 07:54:46 crc kubenswrapper[4612]: I1203 07:54:46.068388 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-sntq8"] Dec 03 
07:54:46 crc kubenswrapper[4612]: I1203 07:54:46.077208 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-wzkm4"] Dec 03 07:54:46 crc kubenswrapper[4612]: I1203 07:54:46.087818 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-sntq8"] Dec 03 07:54:47 crc kubenswrapper[4612]: I1203 07:54:47.107303 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58391f1e-0f92-4c3c-844d-b74b3bfd1af6" path="/var/lib/kubelet/pods/58391f1e-0f92-4c3c-844d-b74b3bfd1af6/volumes" Dec 03 07:54:47 crc kubenswrapper[4612]: I1203 07:54:47.108817 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8d788a9-ac6b-4242-9ffc-26578617f984" path="/var/lib/kubelet/pods/c8d788a9-ac6b-4242-9ffc-26578617f984/volumes" Dec 03 07:54:59 crc kubenswrapper[4612]: I1203 07:54:59.090545 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:54:59 crc kubenswrapper[4612]: E1203 07:54:59.093228 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:55:13 crc kubenswrapper[4612]: I1203 07:55:13.090660 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:55:13 crc kubenswrapper[4612]: E1203 07:55:13.091822 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:55:22 crc kubenswrapper[4612]: I1203 07:55:22.065479 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4pvx9"] Dec 03 07:55:22 crc kubenswrapper[4612]: I1203 07:55:22.073654 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4pvx9"] Dec 03 07:55:23 crc kubenswrapper[4612]: I1203 07:55:23.102367 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6433f62-cce6-47e4-971f-9d568a1e0cb3" path="/var/lib/kubelet/pods/c6433f62-cce6-47e4-971f-9d568a1e0cb3/volumes" Dec 03 07:55:24 crc kubenswrapper[4612]: I1203 07:55:24.090018 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:55:24 crc kubenswrapper[4612]: E1203 07:55:24.090611 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:55:31 crc kubenswrapper[4612]: I1203 07:55:31.036226 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-pqqrz"] Dec 03 
07:55:31 crc kubenswrapper[4612]: I1203 07:55:31.047766 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-pqqrz"] Dec 03 07:55:31 crc kubenswrapper[4612]: I1203 07:55:31.103123 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0600693c-3ac0-4d42-8efd-c3140c6474a8" path="/var/lib/kubelet/pods/0600693c-3ac0-4d42-8efd-c3140c6474a8/volumes" Dec 03 07:55:37 crc kubenswrapper[4612]: I1203 07:55:37.064047 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gc599"] Dec 03 07:55:37 crc kubenswrapper[4612]: I1203 07:55:37.073675 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gc599"] Dec 03 07:55:37 crc kubenswrapper[4612]: I1203 07:55:37.095884 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:55:37 crc kubenswrapper[4612]: E1203 07:55:37.096415 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:55:37 crc kubenswrapper[4612]: I1203 07:55:37.101812 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a0871c4-f877-4382-8458-cc41ff21f67d" path="/var/lib/kubelet/pods/3a0871c4-f877-4382-8458-cc41ff21f67d/volumes" Dec 03 07:55:43 crc kubenswrapper[4612]: I1203 07:55:43.029670 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-ptq9j"] Dec 03 07:55:43 crc kubenswrapper[4612]: I1203 07:55:43.041608 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-ptq9j"] Dec 03 07:55:43 crc kubenswrapper[4612]: I1203 07:55:43.101582 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff" path="/var/lib/kubelet/pods/2b19c78e-8e2a-4d07-9c3f-1fbd6ccfd7ff/volumes" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.155524 4612 scope.go:117] "RemoveContainer" containerID="049fb7ab852cbc63d04cc162f819fc8f83c2e03c36871c3487b835ada5813e8f" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.198971 4612 scope.go:117] "RemoveContainer" containerID="11067c81d6a6cb14cc8a9fc088579b182ea9765213525108e86817b3c865b59d" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.248054 4612 scope.go:117] "RemoveContainer" containerID="34d6daa6f15b1c60dccf87c84047399bc6abe70b6604de7104cfcbaf26ab6442" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.290932 4612 scope.go:117] "RemoveContainer" containerID="46f45e268c333bcea38f9a80e4fbb4eb5f18dae0ed845ee802df9bea3df4c811" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.346445 4612 scope.go:117] "RemoveContainer" containerID="f9db1bd3ad19cd9f1dab9636c058ac499116de875fc34ce04cb474d5a211da77" Dec 03 07:55:44 crc kubenswrapper[4612]: I1203 07:55:44.407337 4612 scope.go:117] "RemoveContainer" containerID="319d2dac01f620e4579713fbb6e23f6a4dbf61b21db0822d9fd053bf9ec59fdf" Dec 03 07:55:52 crc kubenswrapper[4612]: I1203 07:55:52.090159 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:55:52 crc kubenswrapper[4612]: E1203 07:55:52.090882 4612 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:56:01 crc kubenswrapper[4612]: I1203 07:56:01.043217 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-nvxkr"] Dec 03 07:56:01 crc kubenswrapper[4612]: I1203 07:56:01.052404 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-nvxkr"] Dec 03 07:56:01 crc kubenswrapper[4612]: I1203 07:56:01.102378 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08" path="/var/lib/kubelet/pods/cb33a6b1-8b7b-4a8d-b6ef-8fd834dcdf08/volumes" Dec 03 07:56:06 crc kubenswrapper[4612]: I1203 07:56:06.090445 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:56:06 crc kubenswrapper[4612]: E1203 07:56:06.091385 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:56:19 crc kubenswrapper[4612]: I1203 07:56:19.090270 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:56:19 crc kubenswrapper[4612]: E1203 07:56:19.091375 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:56:32 crc kubenswrapper[4612]: I1203 07:56:32.089274 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:56:32 crc kubenswrapper[4612]: E1203 07:56:32.090560 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:56:39 crc kubenswrapper[4612]: I1203 07:56:39.320642 4612 generic.go:334] "Generic (PLEG): container finished" podID="8d6c8be7-a683-465a-9894-08b5ca61791e" containerID="1a81187634ebf74e4874f2f58587de8ff7663b4ab19bea285167eb627c686cf9" exitCode=0 Dec 03 07:56:39 crc kubenswrapper[4612]: I1203 07:56:39.320734 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" 
event={"ID":"8d6c8be7-a683-465a-9894-08b5ca61791e","Type":"ContainerDied","Data":"1a81187634ebf74e4874f2f58587de8ff7663b4ab19bea285167eb627c686cf9"} Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.721548 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.835017 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key\") pod \"8d6c8be7-a683-465a-9894-08b5ca61791e\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.835175 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qb4j\" (UniqueName: \"kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j\") pod \"8d6c8be7-a683-465a-9894-08b5ca61791e\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.835261 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory\") pod \"8d6c8be7-a683-465a-9894-08b5ca61791e\" (UID: \"8d6c8be7-a683-465a-9894-08b5ca61791e\") " Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.840292 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j" (OuterVolumeSpecName: "kube-api-access-2qb4j") pod "8d6c8be7-a683-465a-9894-08b5ca61791e" (UID: "8d6c8be7-a683-465a-9894-08b5ca61791e"). InnerVolumeSpecName "kube-api-access-2qb4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.866335 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory" (OuterVolumeSpecName: "inventory") pod "8d6c8be7-a683-465a-9894-08b5ca61791e" (UID: "8d6c8be7-a683-465a-9894-08b5ca61791e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.869357 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8d6c8be7-a683-465a-9894-08b5ca61791e" (UID: "8d6c8be7-a683-465a-9894-08b5ca61791e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.937982 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.938030 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qb4j\" (UniqueName: \"kubernetes.io/projected/8d6c8be7-a683-465a-9894-08b5ca61791e-kube-api-access-2qb4j\") on node \"crc\" DevicePath \"\"" Dec 03 07:56:40 crc kubenswrapper[4612]: I1203 07:56:40.938044 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d6c8be7-a683-465a-9894-08b5ca61791e-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.346082 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" event={"ID":"8d6c8be7-a683-465a-9894-08b5ca61791e","Type":"ContainerDied","Data":"5769ebcb6439a78d4417ed1adbcfa54317120281dde37758700e8461a978c0c1"} Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.346119 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5769ebcb6439a78d4417ed1adbcfa54317120281dde37758700e8461a978c0c1" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.346139 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.437269 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk"] Dec 03 07:56:41 crc kubenswrapper[4612]: E1203 07:56:41.437687 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6c8be7-a683-465a-9894-08b5ca61791e" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.437713 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6c8be7-a683-465a-9894-08b5ca61791e" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.438003 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d6c8be7-a683-465a-9894-08b5ca61791e" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.438767 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.442187 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.442372 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.444319 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.448055 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.448124 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl7vh\" (UniqueName: \"kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.448160 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.452049 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk"] Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.453251 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.550329 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.550449 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl7vh\" (UniqueName: \"kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.550487 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.553930 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.553930 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.578588 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl7vh\" (UniqueName: \"kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m45rk\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:41 crc kubenswrapper[4612]: I1203 07:56:41.759357 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:56:42 crc kubenswrapper[4612]: I1203 07:56:42.320400 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk"] Dec 03 07:56:42 crc kubenswrapper[4612]: W1203 07:56:42.324825 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27f1ab42_07b8_4697_ae0b_0afc5cb72e06.slice/crio-7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7 WatchSource:0}: Error finding container 7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7: Status 404 returned error can't find the container with id 7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7 Dec 03 07:56:42 crc kubenswrapper[4612]: I1203 07:56:42.362927 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" event={"ID":"27f1ab42-07b8-4697-ae0b-0afc5cb72e06","Type":"ContainerStarted","Data":"7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7"} Dec 03 07:56:43 crc kubenswrapper[4612]: I1203 07:56:43.089714 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:56:43 crc kubenswrapper[4612]: E1203 07:56:43.090307 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:56:43 crc kubenswrapper[4612]: I1203 07:56:43.381327 4612 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" event={"ID":"27f1ab42-07b8-4697-ae0b-0afc5cb72e06","Type":"ContainerStarted","Data":"56c6b2455f41e99fe10d857a74643d17b6b5b6d2f0d8a1dbb7e2d8f206f13c80"} Dec 03 07:56:43 crc kubenswrapper[4612]: I1203 07:56:43.408385 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" podStartSLOduration=1.8804516489999998 podStartE2EDuration="2.408366934s" podCreationTimestamp="2025-12-03 07:56:41 +0000 UTC" firstStartedPulling="2025-12-03 07:56:42.329919225 +0000 UTC m=+1765.503276635" lastFinishedPulling="2025-12-03 07:56:42.85783451 +0000 UTC m=+1766.031191920" observedRunningTime="2025-12-03 07:56:43.398314242 +0000 UTC m=+1766.571671642" watchObservedRunningTime="2025-12-03 07:56:43.408366934 +0000 UTC m=+1766.581724334" Dec 03 07:56:44 crc kubenswrapper[4612]: I1203 07:56:44.580397 4612 scope.go:117] "RemoveContainer" containerID="8b96aee156c0cafacba42f1fdda0703299e6d91b25e2330e57d7bba7e1e49e95" Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.044331 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-krjbf"] Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.051569 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-nnmgl"] Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.058702 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-krjbf"] Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.066235 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-nnmgl"] Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.101907 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67" path="/var/lib/kubelet/pods/f4a195d4-da6b-4c7f-a68e-0cbd8f9a4e67/volumes" Dec 03 07:56:45 crc kubenswrapper[4612]: I1203 07:56:45.103400 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a6e38a-a03f-4803-82e6-1acdd1b843ed" path="/var/lib/kubelet/pods/f5a6e38a-a03f-4803-82e6-1acdd1b843ed/volumes" Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.032645 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-2cf4-account-create-update-6p9qp"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.039636 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-2cf4-account-create-update-6p9qp"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.046913 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-fe46-account-create-update-r7zpj"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.057734 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-zpc84"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.065330 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-357d-account-create-update-cqp62"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.072497 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-fe46-account-create-update-r7zpj"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.080067 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-zpc84"] Dec 03 07:56:46 crc kubenswrapper[4612]: I1203 07:56:46.087904 4612 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-357d-account-create-update-cqp62"] Dec 03 07:56:47 crc kubenswrapper[4612]: I1203 07:56:47.102912 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b0e8baf-18db-482a-994e-6ccd87671fee" path="/var/lib/kubelet/pods/4b0e8baf-18db-482a-994e-6ccd87671fee/volumes" Dec 03 07:56:47 crc kubenswrapper[4612]: I1203 07:56:47.104057 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75c9c6ea-f164-4f1d-b907-681771c086e5" path="/var/lib/kubelet/pods/75c9c6ea-f164-4f1d-b907-681771c086e5/volumes" Dec 03 07:56:47 crc kubenswrapper[4612]: I1203 07:56:47.104989 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82be14dd-0210-4d88-8dbe-c4ca490399aa" path="/var/lib/kubelet/pods/82be14dd-0210-4d88-8dbe-c4ca490399aa/volumes" Dec 03 07:56:47 crc kubenswrapper[4612]: I1203 07:56:47.105820 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2359b8-eb58-403b-a848-bf3d750015bf" path="/var/lib/kubelet/pods/dc2359b8-eb58-403b-a848-bf3d750015bf/volumes" Dec 03 07:56:56 crc kubenswrapper[4612]: I1203 07:56:56.089872 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:56:56 crc kubenswrapper[4612]: E1203 07:56:56.091395 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:57:11 crc kubenswrapper[4612]: I1203 07:57:11.090032 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:57:11 crc kubenswrapper[4612]: E1203 07:57:11.091441 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:57:20 crc kubenswrapper[4612]: I1203 07:57:20.054851 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-m5ff7"] Dec 03 07:57:20 crc kubenswrapper[4612]: I1203 07:57:20.068877 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-m5ff7"] Dec 03 07:57:21 crc kubenswrapper[4612]: I1203 07:57:21.110364 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="304d081f-b185-4106-b4e1-56b1bdf60a8f" path="/var/lib/kubelet/pods/304d081f-b185-4106-b4e1-56b1bdf60a8f/volumes" Dec 03 07:57:25 crc kubenswrapper[4612]: I1203 07:57:25.090319 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:57:25 crc kubenswrapper[4612]: E1203 07:57:25.091038 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:57:37 crc kubenswrapper[4612]: I1203 07:57:37.090059 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:57:37 crc kubenswrapper[4612]: E1203 07:57:37.091356 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.659208 4612 scope.go:117] "RemoveContainer" containerID="b31385f605e860af4bac5da70cfe14abba501d6a181fcc48d734ff66902bab4c" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.698319 4612 scope.go:117] "RemoveContainer" containerID="4628bbeab0dbb5bf38ec46ef91ce70fc5da54f1015a5a3bb5105d8b7686819c4" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.743461 4612 scope.go:117] "RemoveContainer" containerID="2496b60700d18c3720d14fdc2988a4d0b0bf8680a9d3c258762bc17077315378" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.804544 4612 scope.go:117] "RemoveContainer" containerID="fdf9b31797c050d16e9f44553c77c7cd3c8037b4c9109647627b1447433c2244" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.850516 4612 scope.go:117] "RemoveContainer" containerID="ff484b7a21a0b2dd360a06ca438a41e1a48881dffa515269ca506da4ca4329f7" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.895107 4612 scope.go:117] "RemoveContainer" containerID="710ce6f0c20285af1557680764bb5701049012bf9c1868315e7d23b1b1640833" Dec 03 07:57:44 crc kubenswrapper[4612]: I1203 07:57:44.927267 4612 scope.go:117] "RemoveContainer" containerID="0ebd2f457a335251fb4347a02e1cc00b8de61f655d1f4357358fdfcbf9820a17" Dec 03 07:57:45 crc kubenswrapper[4612]: I1203 07:57:45.054887 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s5xlq"] Dec 03 07:57:45 crc kubenswrapper[4612]: I1203 07:57:45.063826 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s5xlq"] Dec 03 07:57:45 crc kubenswrapper[4612]: I1203 07:57:45.101226 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0982d29-f1b5-4dd6-b87b-a70a08e54712" path="/var/lib/kubelet/pods/a0982d29-f1b5-4dd6-b87b-a70a08e54712/volumes" Dec 03 07:57:46 crc kubenswrapper[4612]: I1203 07:57:46.029694 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-q746f"] Dec 03 07:57:46 crc kubenswrapper[4612]: I1203 07:57:46.036530 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-q746f"] Dec 03 07:57:47 crc kubenswrapper[4612]: I1203 07:57:47.103401 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f97e1ed8-5ec1-4b29-bb43-8ff083052bfd" path="/var/lib/kubelet/pods/f97e1ed8-5ec1-4b29-bb43-8ff083052bfd/volumes" Dec 03 07:57:50 crc kubenswrapper[4612]: I1203 07:57:50.090692 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 07:57:51 crc kubenswrapper[4612]: I1203 
07:57:51.045503 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2"} Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.515391 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.517571 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.543567 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.642283 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.642628 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq5ts\" (UniqueName: \"kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.642913 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.744301 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.744590 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.744763 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq5ts\" (UniqueName: \"kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.745014 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities\") pod \"community-operators-twlcj\" (UID: 
\"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.745115 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.771121 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq5ts\" (UniqueName: \"kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts\") pod \"community-operators-twlcj\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:09 crc kubenswrapper[4612]: I1203 07:58:09.849663 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:10 crc kubenswrapper[4612]: I1203 07:58:10.253918 4612 generic.go:334] "Generic (PLEG): container finished" podID="27f1ab42-07b8-4697-ae0b-0afc5cb72e06" containerID="56c6b2455f41e99fe10d857a74643d17b6b5b6d2f0d8a1dbb7e2d8f206f13c80" exitCode=0 Dec 03 07:58:10 crc kubenswrapper[4612]: I1203 07:58:10.254198 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" event={"ID":"27f1ab42-07b8-4697-ae0b-0afc5cb72e06","Type":"ContainerDied","Data":"56c6b2455f41e99fe10d857a74643d17b6b5b6d2f0d8a1dbb7e2d8f206f13c80"} Dec 03 07:58:10 crc kubenswrapper[4612]: I1203 07:58:10.456299 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.269491 4612 generic.go:334] "Generic (PLEG): container finished" podID="81157c4d-0970-4303-9bde-397f51fb4b65" containerID="9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845" exitCode=0 Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.269580 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerDied","Data":"9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845"} Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.269865 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerStarted","Data":"7e3de0f18fc345954d94b6507e6041615e3399c000678aca4bb533a07accb8a6"} Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.781431 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.898054 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory\") pod \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.898426 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key\") pod \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.898505 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl7vh\" (UniqueName: \"kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh\") pod \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\" (UID: \"27f1ab42-07b8-4697-ae0b-0afc5cb72e06\") " Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.914608 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh" (OuterVolumeSpecName: "kube-api-access-vl7vh") pod "27f1ab42-07b8-4697-ae0b-0afc5cb72e06" (UID: "27f1ab42-07b8-4697-ae0b-0afc5cb72e06"). InnerVolumeSpecName "kube-api-access-vl7vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.926712 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "27f1ab42-07b8-4697-ae0b-0afc5cb72e06" (UID: "27f1ab42-07b8-4697-ae0b-0afc5cb72e06"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:58:11 crc kubenswrapper[4612]: I1203 07:58:11.933410 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory" (OuterVolumeSpecName: "inventory") pod "27f1ab42-07b8-4697-ae0b-0afc5cb72e06" (UID: "27f1ab42-07b8-4697-ae0b-0afc5cb72e06"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.001334 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl7vh\" (UniqueName: \"kubernetes.io/projected/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-kube-api-access-vl7vh\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.001365 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.001377 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27f1ab42-07b8-4697-ae0b-0afc5cb72e06-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.285124 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" event={"ID":"27f1ab42-07b8-4697-ae0b-0afc5cb72e06","Type":"ContainerDied","Data":"7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7"} Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.285216 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d7182d0e6d9277554fde8f78ee88ae975a8d06b14f9d193a4a0a6ac52691cd7" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.285250 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m45rk" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.412239 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm"] Dec 03 07:58:12 crc kubenswrapper[4612]: E1203 07:58:12.412732 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f1ab42-07b8-4697-ae0b-0afc5cb72e06" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.412754 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f1ab42-07b8-4697-ae0b-0afc5cb72e06" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.413062 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f1ab42-07b8-4697-ae0b-0afc5cb72e06" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.413826 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.417810 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.418709 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.419238 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.419892 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.431013 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm"] Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.513256 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.513540 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-888xf\" (UniqueName: \"kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.513697 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.615255 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.615384 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-888xf\" (UniqueName: \"kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.615431 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.620984 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.624663 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.644015 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-888xf\" (UniqueName: \"kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-p95mm\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:12 crc kubenswrapper[4612]: I1203 07:58:12.737341 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:13 crc kubenswrapper[4612]: I1203 07:58:13.266616 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm"] Dec 03 07:58:13 crc kubenswrapper[4612]: W1203 07:58:13.272267 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7624e359_51e1_46df_829a_12aebc8d3688.slice/crio-528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94 WatchSource:0}: Error finding container 528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94: Status 404 returned error can't find the container with id 528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94 Dec 03 07:58:13 crc kubenswrapper[4612]: I1203 07:58:13.295573 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" event={"ID":"7624e359-51e1-46df-829a-12aebc8d3688","Type":"ContainerStarted","Data":"528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94"} Dec 03 07:58:14 crc kubenswrapper[4612]: I1203 07:58:14.306190 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" event={"ID":"7624e359-51e1-46df-829a-12aebc8d3688","Type":"ContainerStarted","Data":"a3be5f71720d1cd561f0d3abe2893fd1ce140e902d7aba74b76aff60df105965"} Dec 03 07:58:14 crc kubenswrapper[4612]: I1203 07:58:14.308695 4612 generic.go:334] "Generic (PLEG): container finished" podID="81157c4d-0970-4303-9bde-397f51fb4b65" containerID="0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d" exitCode=0 Dec 03 07:58:14 crc kubenswrapper[4612]: I1203 07:58:14.308739 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerDied","Data":"0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d"} Dec 03 07:58:14 crc kubenswrapper[4612]: I1203 07:58:14.325858 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" podStartSLOduration=1.67286684 podStartE2EDuration="2.325838892s" podCreationTimestamp="2025-12-03 07:58:12 +0000 UTC" firstStartedPulling="2025-12-03 07:58:13.275827852 +0000 UTC m=+1856.449185252" lastFinishedPulling="2025-12-03 07:58:13.928799854 +0000 UTC m=+1857.102157304" observedRunningTime="2025-12-03 07:58:14.319579805 +0000 UTC m=+1857.492937205" watchObservedRunningTime="2025-12-03 07:58:14.325838892 +0000 UTC m=+1857.499196292" Dec 03 07:58:16 crc kubenswrapper[4612]: I1203 07:58:16.343257 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerStarted","Data":"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d"} Dec 03 07:58:16 crc kubenswrapper[4612]: I1203 07:58:16.374576 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-twlcj" podStartSLOduration=3.396767983 podStartE2EDuration="7.37455402s" podCreationTimestamp="2025-12-03 07:58:09 +0000 UTC" firstStartedPulling="2025-12-03 07:58:11.27418611 +0000 UTC m=+1854.447543540" lastFinishedPulling="2025-12-03 07:58:15.251972147 +0000 UTC m=+1858.425329577" observedRunningTime="2025-12-03 07:58:16.369060973 +0000 UTC m=+1859.542418383" watchObservedRunningTime="2025-12-03 07:58:16.37455402 +0000 UTC m=+1859.547911430" Dec 03 07:58:19 crc kubenswrapper[4612]: I1203 07:58:19.855079 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:19 crc kubenswrapper[4612]: I1203 07:58:19.855773 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:19 crc kubenswrapper[4612]: I1203 07:58:19.939417 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:20 crc kubenswrapper[4612]: E1203 07:58:20.181412 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7624e359_51e1_46df_829a_12aebc8d3688.slice/crio-conmon-a3be5f71720d1cd561f0d3abe2893fd1ce140e902d7aba74b76aff60df105965.scope\": RecentStats: unable to find data in memory cache]" Dec 03 07:58:20 crc kubenswrapper[4612]: I1203 07:58:20.386180 4612 generic.go:334] "Generic (PLEG): container finished" podID="7624e359-51e1-46df-829a-12aebc8d3688" containerID="a3be5f71720d1cd561f0d3abe2893fd1ce140e902d7aba74b76aff60df105965" exitCode=0 Dec 03 07:58:20 crc kubenswrapper[4612]: I1203 07:58:20.386248 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" event={"ID":"7624e359-51e1-46df-829a-12aebc8d3688","Type":"ContainerDied","Data":"a3be5f71720d1cd561f0d3abe2893fd1ce140e902d7aba74b76aff60df105965"} Dec 03 07:58:20 crc kubenswrapper[4612]: I1203 07:58:20.471395 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:20 crc kubenswrapper[4612]: I1203 07:58:20.535869 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.804085 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.918938 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-888xf\" (UniqueName: \"kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf\") pod \"7624e359-51e1-46df-829a-12aebc8d3688\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.919135 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory\") pod \"7624e359-51e1-46df-829a-12aebc8d3688\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.919193 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key\") pod \"7624e359-51e1-46df-829a-12aebc8d3688\" (UID: \"7624e359-51e1-46df-829a-12aebc8d3688\") " Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.927649 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf" (OuterVolumeSpecName: "kube-api-access-888xf") pod "7624e359-51e1-46df-829a-12aebc8d3688" (UID: "7624e359-51e1-46df-829a-12aebc8d3688"). InnerVolumeSpecName "kube-api-access-888xf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.946657 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7624e359-51e1-46df-829a-12aebc8d3688" (UID: "7624e359-51e1-46df-829a-12aebc8d3688"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:58:21 crc kubenswrapper[4612]: I1203 07:58:21.966197 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory" (OuterVolumeSpecName: "inventory") pod "7624e359-51e1-46df-829a-12aebc8d3688" (UID: "7624e359-51e1-46df-829a-12aebc8d3688"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.021316 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.021353 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-888xf\" (UniqueName: \"kubernetes.io/projected/7624e359-51e1-46df-829a-12aebc8d3688-kube-api-access-888xf\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.021367 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7624e359-51e1-46df-829a-12aebc8d3688-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.410558 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" event={"ID":"7624e359-51e1-46df-829a-12aebc8d3688","Type":"ContainerDied","Data":"528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94"} Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.410915 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="528acb1cc09f5d68c458ba0f7471974dbc96ce8ecd69e8d110e3ce22596fcf94" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.410708 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-twlcj" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="registry-server" containerID="cri-o://0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d" gracePeriod=2 Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.411152 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-p95mm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.493737 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm"] Dec 03 07:58:22 crc kubenswrapper[4612]: E1203 07:58:22.497223 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7624e359-51e1-46df-829a-12aebc8d3688" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.497249 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7624e359-51e1-46df-829a-12aebc8d3688" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.497467 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7624e359-51e1-46df-829a-12aebc8d3688" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.498092 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.499844 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.499985 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.504918 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.507218 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.508061 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm"] Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.635910 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.636190 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bp75\" (UniqueName: \"kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.636404 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.738313 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bp75\" (UniqueName: \"kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.738417 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.738518 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: 
\"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.743415 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.747124 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.757808 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bp75\" (UniqueName: \"kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-g56jm\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.841909 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.862807 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.942817 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content\") pod \"81157c4d-0970-4303-9bde-397f51fb4b65\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.943746 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities\") pod \"81157c4d-0970-4303-9bde-397f51fb4b65\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.945118 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qq5ts\" (UniqueName: \"kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts\") pod \"81157c4d-0970-4303-9bde-397f51fb4b65\" (UID: \"81157c4d-0970-4303-9bde-397f51fb4b65\") " Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.944822 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities" (OuterVolumeSpecName: "utilities") pod "81157c4d-0970-4303-9bde-397f51fb4b65" (UID: "81157c4d-0970-4303-9bde-397f51fb4b65"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.946377 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:22 crc kubenswrapper[4612]: I1203 07:58:22.949194 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts" (OuterVolumeSpecName: "kube-api-access-qq5ts") pod "81157c4d-0970-4303-9bde-397f51fb4b65" (UID: "81157c4d-0970-4303-9bde-397f51fb4b65"). InnerVolumeSpecName "kube-api-access-qq5ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.047851 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qq5ts\" (UniqueName: \"kubernetes.io/projected/81157c4d-0970-4303-9bde-397f51fb4b65-kube-api-access-qq5ts\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.074084 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81157c4d-0970-4303-9bde-397f51fb4b65" (UID: "81157c4d-0970-4303-9bde-397f51fb4b65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.149972 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81157c4d-0970-4303-9bde-397f51fb4b65-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.425119 4612 generic.go:334] "Generic (PLEG): container finished" podID="81157c4d-0970-4303-9bde-397f51fb4b65" containerID="0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d" exitCode=0 Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.425224 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-twlcj" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.425266 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerDied","Data":"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d"} Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.425457 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twlcj" event={"ID":"81157c4d-0970-4303-9bde-397f51fb4b65","Type":"ContainerDied","Data":"7e3de0f18fc345954d94b6507e6041615e3399c000678aca4bb533a07accb8a6"} Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.425529 4612 scope.go:117] "RemoveContainer" containerID="0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.460587 4612 scope.go:117] "RemoveContainer" containerID="0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.466240 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.479143 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-twlcj"] Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.492211 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm"] Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.493432 4612 scope.go:117] "RemoveContainer" containerID="9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.518906 4612 scope.go:117] "RemoveContainer" containerID="0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d" Dec 03 07:58:23 crc kubenswrapper[4612]: E1203 07:58:23.520478 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d\": container with ID starting with 0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d not found: ID does not exist" containerID="0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.520546 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d"} err="failed to get container status \"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d\": rpc error: code = NotFound desc = could not find container \"0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d\": container with ID starting with 0fe3778e8a81db1030f35464d1070bf01a4150d53cf2d3fe81edb3684e45ae8d not found: ID does not exist" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.520584 4612 scope.go:117] "RemoveContainer" containerID="0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d" Dec 03 07:58:23 crc kubenswrapper[4612]: E1203 07:58:23.521184 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d\": container with ID starting with 
0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d not found: ID does not exist" containerID="0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.521226 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d"} err="failed to get container status \"0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d\": rpc error: code = NotFound desc = could not find container \"0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d\": container with ID starting with 0c32d23a9d0be0f65d284c2924c7b3ba49fddd9315bba67be617ee87da017e8d not found: ID does not exist" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.521255 4612 scope.go:117] "RemoveContainer" containerID="9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845" Dec 03 07:58:23 crc kubenswrapper[4612]: E1203 07:58:23.522290 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845\": container with ID starting with 9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845 not found: ID does not exist" containerID="9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845" Dec 03 07:58:23 crc kubenswrapper[4612]: I1203 07:58:23.522313 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845"} err="failed to get container status \"9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845\": rpc error: code = NotFound desc = could not find container \"9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845\": container with ID starting with 9fc8c8457e077f67ffda088e7be548b478ce6b9436c9a906e99a3d9dd1816845 not found: ID does not exist" Dec 03 07:58:24 crc kubenswrapper[4612]: I1203 07:58:24.438917 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" event={"ID":"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd","Type":"ContainerStarted","Data":"72e9376823faf47856bde2dada6562ffb5c606b845ba2a7a3b209b2ab153128a"} Dec 03 07:58:24 crc kubenswrapper[4612]: I1203 07:58:24.439360 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" event={"ID":"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd","Type":"ContainerStarted","Data":"cb00d8ccbfb70c57c00b2ab76605a2142f37a7cbcc61d29688049395161f0315"} Dec 03 07:58:24 crc kubenswrapper[4612]: I1203 07:58:24.459677 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" podStartSLOduration=2.00010763 podStartE2EDuration="2.45966131s" podCreationTimestamp="2025-12-03 07:58:22 +0000 UTC" firstStartedPulling="2025-12-03 07:58:23.493527745 +0000 UTC m=+1866.666885145" lastFinishedPulling="2025-12-03 07:58:23.953081415 +0000 UTC m=+1867.126438825" observedRunningTime="2025-12-03 07:58:24.451856045 +0000 UTC m=+1867.625213445" watchObservedRunningTime="2025-12-03 07:58:24.45966131 +0000 UTC m=+1867.633018710" Dec 03 07:58:25 crc kubenswrapper[4612]: I1203 07:58:25.101596 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" 
path="/var/lib/kubelet/pods/81157c4d-0970-4303-9bde-397f51fb4b65/volumes" Dec 03 07:58:31 crc kubenswrapper[4612]: I1203 07:58:31.125827 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-66qkn"] Dec 03 07:58:31 crc kubenswrapper[4612]: I1203 07:58:31.135980 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-66qkn"] Dec 03 07:58:33 crc kubenswrapper[4612]: I1203 07:58:33.107803 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94028f3c-def3-439a-9a40-07c39f058702" path="/var/lib/kubelet/pods/94028f3c-def3-439a-9a40-07c39f058702/volumes" Dec 03 07:58:45 crc kubenswrapper[4612]: I1203 07:58:45.082613 4612 scope.go:117] "RemoveContainer" containerID="96231eca110fd3215d16c0bab4de17fae65d15d898792760701a5d38ed1d10cc" Dec 03 07:58:45 crc kubenswrapper[4612]: I1203 07:58:45.140286 4612 scope.go:117] "RemoveContainer" containerID="c8bdac0076f6d078133ee2b1841130120fbf19dd228a40d36f691c6c6d19d551" Dec 03 07:58:45 crc kubenswrapper[4612]: I1203 07:58:45.210565 4612 scope.go:117] "RemoveContainer" containerID="c2c9d678cfcac6cd47687aaf57705cf766653520efd2f5c46ca38c7a6ea0123f" Dec 03 07:59:11 crc kubenswrapper[4612]: I1203 07:59:11.980708 4612 generic.go:334] "Generic (PLEG): container finished" podID="eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" containerID="72e9376823faf47856bde2dada6562ffb5c606b845ba2a7a3b209b2ab153128a" exitCode=0 Dec 03 07:59:11 crc kubenswrapper[4612]: I1203 07:59:11.980830 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" event={"ID":"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd","Type":"ContainerDied","Data":"72e9376823faf47856bde2dada6562ffb5c606b845ba2a7a3b209b2ab153128a"} Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.390023 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.403607 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key\") pod \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.403741 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bp75\" (UniqueName: \"kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75\") pod \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.403764 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory\") pod \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\" (UID: \"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd\") " Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.410733 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75" (OuterVolumeSpecName: "kube-api-access-7bp75") pod "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" (UID: "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd"). InnerVolumeSpecName "kube-api-access-7bp75". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.450396 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" (UID: "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.465694 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory" (OuterVolumeSpecName: "inventory") pod "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" (UID: "eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.505438 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.505551 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bp75\" (UniqueName: \"kubernetes.io/projected/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-kube-api-access-7bp75\") on node \"crc\" DevicePath \"\"" Dec 03 07:59:13 crc kubenswrapper[4612]: I1203 07:59:13.505623 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.000971 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" event={"ID":"eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd","Type":"ContainerDied","Data":"cb00d8ccbfb70c57c00b2ab76605a2142f37a7cbcc61d29688049395161f0315"} Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.001027 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb00d8ccbfb70c57c00b2ab76605a2142f37a7cbcc61d29688049395161f0315" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.001029 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-g56jm" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.098592 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k"] Dec 03 07:59:14 crc kubenswrapper[4612]: E1203 07:59:14.099067 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="extract-content" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099088 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="extract-content" Dec 03 07:59:14 crc kubenswrapper[4612]: E1203 07:59:14.099099 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="registry-server" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099108 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="registry-server" Dec 03 07:59:14 crc kubenswrapper[4612]: E1203 07:59:14.099146 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099158 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 07:59:14 crc kubenswrapper[4612]: E1203 07:59:14.099176 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="extract-utilities" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099184 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="extract-utilities" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099399 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.099431 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="81157c4d-0970-4303-9bde-397f51fb4b65" containerName="registry-server" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.100099 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.105675 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.105805 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.106060 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.109490 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.115013 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.115159 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np8fm\" (UniqueName: \"kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.115212 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.128354 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k"] Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.217366 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.217539 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np8fm\" (UniqueName: \"kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.217584 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" 
(UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.221301 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.221558 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.231674 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np8fm\" (UniqueName: \"kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l892k\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.426178 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 07:59:14 crc kubenswrapper[4612]: I1203 07:59:14.870205 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k"] Dec 03 07:59:15 crc kubenswrapper[4612]: I1203 07:59:15.012801 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" event={"ID":"f20be1ec-5ef4-4559-8a86-e857886c0856","Type":"ContainerStarted","Data":"8ac080a130902dfa007f19d86ad7a511d584091f785fec5fa547040d6e20a05b"} Dec 03 07:59:16 crc kubenswrapper[4612]: I1203 07:59:16.023251 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" event={"ID":"f20be1ec-5ef4-4559-8a86-e857886c0856","Type":"ContainerStarted","Data":"28de6c75e03b2bff5b87949cd24d11c583c5bfba2481729808d2f65554bf0f4f"} Dec 03 07:59:16 crc kubenswrapper[4612]: I1203 07:59:16.049234 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" podStartSLOduration=1.519238731 podStartE2EDuration="2.04921548s" podCreationTimestamp="2025-12-03 07:59:14 +0000 UTC" firstStartedPulling="2025-12-03 07:59:14.87191788 +0000 UTC m=+1918.045275300" lastFinishedPulling="2025-12-03 07:59:15.401894619 +0000 UTC m=+1918.575252049" observedRunningTime="2025-12-03 07:59:16.040207675 +0000 UTC m=+1919.213565085" watchObservedRunningTime="2025-12-03 07:59:16.04921548 +0000 UTC m=+1919.222572890" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.160723 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z"] Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.163378 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.170500 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.170550 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.176891 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z"] Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.218995 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.219188 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhdx7\" (UniqueName: \"kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.219264 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.321769 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.321908 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhdx7\" (UniqueName: \"kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.321984 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.322896 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume\") pod 
\"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.331653 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.343309 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhdx7\" (UniqueName: \"kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7\") pod \"collect-profiles-29412480-x4p2z\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.484754 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:00 crc kubenswrapper[4612]: I1203 08:00:00.965797 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z"] Dec 03 08:00:01 crc kubenswrapper[4612]: I1203 08:00:01.457764 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" event={"ID":"578f949e-0359-4447-b1cf-4730fac4373d","Type":"ContainerStarted","Data":"b7b2d4b46af12fcd2dcd920d7aebc4265a810b52e6f12b923b346191057e2fc1"} Dec 03 08:00:01 crc kubenswrapper[4612]: I1203 08:00:01.457813 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" event={"ID":"578f949e-0359-4447-b1cf-4730fac4373d","Type":"ContainerStarted","Data":"837c653cf11a156867389e154647bf6fd5919914538bd04dcb5651697655f24a"} Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.469721 4612 generic.go:334] "Generic (PLEG): container finished" podID="578f949e-0359-4447-b1cf-4730fac4373d" containerID="b7b2d4b46af12fcd2dcd920d7aebc4265a810b52e6f12b923b346191057e2fc1" exitCode=0 Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.469835 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" event={"ID":"578f949e-0359-4447-b1cf-4730fac4373d","Type":"ContainerDied","Data":"b7b2d4b46af12fcd2dcd920d7aebc4265a810b52e6f12b923b346191057e2fc1"} Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.857716 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.879841 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume\") pod \"578f949e-0359-4447-b1cf-4730fac4373d\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.880020 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhdx7\" (UniqueName: \"kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7\") pod \"578f949e-0359-4447-b1cf-4730fac4373d\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.880056 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume\") pod \"578f949e-0359-4447-b1cf-4730fac4373d\" (UID: \"578f949e-0359-4447-b1cf-4730fac4373d\") " Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.881751 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume" (OuterVolumeSpecName: "config-volume") pod "578f949e-0359-4447-b1cf-4730fac4373d" (UID: "578f949e-0359-4447-b1cf-4730fac4373d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.885860 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "578f949e-0359-4447-b1cf-4730fac4373d" (UID: "578f949e-0359-4447-b1cf-4730fac4373d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.886333 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7" (OuterVolumeSpecName: "kube-api-access-xhdx7") pod "578f949e-0359-4447-b1cf-4730fac4373d" (UID: "578f949e-0359-4447-b1cf-4730fac4373d"). InnerVolumeSpecName "kube-api-access-xhdx7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.981893 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhdx7\" (UniqueName: \"kubernetes.io/projected/578f949e-0359-4447-b1cf-4730fac4373d-kube-api-access-xhdx7\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.981923 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/578f949e-0359-4447-b1cf-4730fac4373d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:02 crc kubenswrapper[4612]: I1203 08:00:02.981932 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/578f949e-0359-4447-b1cf-4730fac4373d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:03 crc kubenswrapper[4612]: I1203 08:00:03.482014 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" event={"ID":"578f949e-0359-4447-b1cf-4730fac4373d","Type":"ContainerDied","Data":"837c653cf11a156867389e154647bf6fd5919914538bd04dcb5651697655f24a"} Dec 03 08:00:03 crc kubenswrapper[4612]: I1203 08:00:03.482050 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="837c653cf11a156867389e154647bf6fd5919914538bd04dcb5651697655f24a" Dec 03 08:00:03 crc kubenswrapper[4612]: I1203 08:00:03.482099 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z" Dec 03 08:00:03 crc kubenswrapper[4612]: I1203 08:00:03.951058 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh"] Dec 03 08:00:03 crc kubenswrapper[4612]: I1203 08:00:03.959927 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412435-6x4wh"] Dec 03 08:00:05 crc kubenswrapper[4612]: I1203 08:00:05.107572 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df610433-e46b-4098-9b66-0fbf5a28899f" path="/var/lib/kubelet/pods/df610433-e46b-4098-9b66-0fbf5a28899f/volumes" Dec 03 08:00:17 crc kubenswrapper[4612]: I1203 08:00:17.136097 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:00:17 crc kubenswrapper[4612]: I1203 08:00:17.136914 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:00:21 crc kubenswrapper[4612]: I1203 08:00:21.670564 4612 generic.go:334] "Generic (PLEG): container finished" podID="f20be1ec-5ef4-4559-8a86-e857886c0856" containerID="28de6c75e03b2bff5b87949cd24d11c583c5bfba2481729808d2f65554bf0f4f" exitCode=0 Dec 03 08:00:21 crc kubenswrapper[4612]: I1203 08:00:21.670646 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" 
event={"ID":"f20be1ec-5ef4-4559-8a86-e857886c0856","Type":"ContainerDied","Data":"28de6c75e03b2bff5b87949cd24d11c583c5bfba2481729808d2f65554bf0f4f"} Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.152737 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.289538 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np8fm\" (UniqueName: \"kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm\") pod \"f20be1ec-5ef4-4559-8a86-e857886c0856\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.289746 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory\") pod \"f20be1ec-5ef4-4559-8a86-e857886c0856\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.289895 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key\") pod \"f20be1ec-5ef4-4559-8a86-e857886c0856\" (UID: \"f20be1ec-5ef4-4559-8a86-e857886c0856\") " Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.303078 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm" (OuterVolumeSpecName: "kube-api-access-np8fm") pod "f20be1ec-5ef4-4559-8a86-e857886c0856" (UID: "f20be1ec-5ef4-4559-8a86-e857886c0856"). InnerVolumeSpecName "kube-api-access-np8fm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.316452 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f20be1ec-5ef4-4559-8a86-e857886c0856" (UID: "f20be1ec-5ef4-4559-8a86-e857886c0856"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.326799 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory" (OuterVolumeSpecName: "inventory") pod "f20be1ec-5ef4-4559-8a86-e857886c0856" (UID: "f20be1ec-5ef4-4559-8a86-e857886c0856"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.392708 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np8fm\" (UniqueName: \"kubernetes.io/projected/f20be1ec-5ef4-4559-8a86-e857886c0856-kube-api-access-np8fm\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.393165 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.393258 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f20be1ec-5ef4-4559-8a86-e857886c0856-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.710344 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" event={"ID":"f20be1ec-5ef4-4559-8a86-e857886c0856","Type":"ContainerDied","Data":"8ac080a130902dfa007f19d86ad7a511d584091f785fec5fa547040d6e20a05b"} Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.710389 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ac080a130902dfa007f19d86ad7a511d584091f785fec5fa547040d6e20a05b" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.710468 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l892k" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.805443 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bkpsr"] Dec 03 08:00:23 crc kubenswrapper[4612]: E1203 08:00:23.805957 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20be1ec-5ef4-4559-8a86-e857886c0856" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.805979 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20be1ec-5ef4-4559-8a86-e857886c0856" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:23 crc kubenswrapper[4612]: E1203 08:00:23.806002 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578f949e-0359-4447-b1cf-4730fac4373d" containerName="collect-profiles" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.806012 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="578f949e-0359-4447-b1cf-4730fac4373d" containerName="collect-profiles" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.806229 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="578f949e-0359-4447-b1cf-4730fac4373d" containerName="collect-profiles" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.806253 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20be1ec-5ef4-4559-8a86-e857886c0856" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.807077 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.809610 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.809761 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.810711 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.810922 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.861613 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bkpsr"] Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.903432 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.903538 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:23 crc kubenswrapper[4612]: I1203 08:00:23.903665 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hpzh\" (UniqueName: \"kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.005434 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.005532 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.005651 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hpzh\" (UniqueName: \"kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc 
kubenswrapper[4612]: I1203 08:00:24.009708 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.010001 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.024886 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hpzh\" (UniqueName: \"kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh\") pod \"ssh-known-hosts-edpm-deployment-bkpsr\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.175535 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.837204 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bkpsr"] Dec 03 08:00:24 crc kubenswrapper[4612]: I1203 08:00:24.840723 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:00:25 crc kubenswrapper[4612]: I1203 08:00:25.727373 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" event={"ID":"7a053178-34a7-49d7-8119-09c53336a553","Type":"ContainerStarted","Data":"6a1bc271b95b816d39b5fc94b83e1fcf3591a5849f0721b1d01ffadc4e97bb36"} Dec 03 08:00:25 crc kubenswrapper[4612]: I1203 08:00:25.728771 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" event={"ID":"7a053178-34a7-49d7-8119-09c53336a553","Type":"ContainerStarted","Data":"6a108344aa71d699ff1a04a9aa14e15a2df09ba1b6127404a234355a43116474"} Dec 03 08:00:25 crc kubenswrapper[4612]: I1203 08:00:25.748400 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" podStartSLOduration=2.186994666 podStartE2EDuration="2.74838075s" podCreationTimestamp="2025-12-03 08:00:23 +0000 UTC" firstStartedPulling="2025-12-03 08:00:24.840431589 +0000 UTC m=+1988.013788989" lastFinishedPulling="2025-12-03 08:00:25.401817673 +0000 UTC m=+1988.575175073" observedRunningTime="2025-12-03 08:00:25.745807156 +0000 UTC m=+1988.919164556" watchObservedRunningTime="2025-12-03 08:00:25.74838075 +0000 UTC m=+1988.921738160" Dec 03 08:00:33 crc kubenswrapper[4612]: I1203 08:00:33.802735 4612 generic.go:334] "Generic (PLEG): container finished" podID="7a053178-34a7-49d7-8119-09c53336a553" containerID="6a1bc271b95b816d39b5fc94b83e1fcf3591a5849f0721b1d01ffadc4e97bb36" exitCode=0 Dec 03 08:00:33 crc kubenswrapper[4612]: I1203 08:00:33.802891 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" 
event={"ID":"7a053178-34a7-49d7-8119-09c53336a553","Type":"ContainerDied","Data":"6a1bc271b95b816d39b5fc94b83e1fcf3591a5849f0721b1d01ffadc4e97bb36"} Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.266091 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.444916 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hpzh\" (UniqueName: \"kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh\") pod \"7a053178-34a7-49d7-8119-09c53336a553\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.445192 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0\") pod \"7a053178-34a7-49d7-8119-09c53336a553\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.445239 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam\") pod \"7a053178-34a7-49d7-8119-09c53336a553\" (UID: \"7a053178-34a7-49d7-8119-09c53336a553\") " Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.454312 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh" (OuterVolumeSpecName: "kube-api-access-8hpzh") pod "7a053178-34a7-49d7-8119-09c53336a553" (UID: "7a053178-34a7-49d7-8119-09c53336a553"). InnerVolumeSpecName "kube-api-access-8hpzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.481304 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7a053178-34a7-49d7-8119-09c53336a553" (UID: "7a053178-34a7-49d7-8119-09c53336a553"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.482086 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "7a053178-34a7-49d7-8119-09c53336a553" (UID: "7a053178-34a7-49d7-8119-09c53336a553"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.547322 4612 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.547362 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7a053178-34a7-49d7-8119-09c53336a553-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.547377 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hpzh\" (UniqueName: \"kubernetes.io/projected/7a053178-34a7-49d7-8119-09c53336a553-kube-api-access-8hpzh\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.823155 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" event={"ID":"7a053178-34a7-49d7-8119-09c53336a553","Type":"ContainerDied","Data":"6a108344aa71d699ff1a04a9aa14e15a2df09ba1b6127404a234355a43116474"} Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.823588 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a108344aa71d699ff1a04a9aa14e15a2df09ba1b6127404a234355a43116474" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.823207 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bkpsr" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.930606 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw"] Dec 03 08:00:35 crc kubenswrapper[4612]: E1203 08:00:35.931066 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a053178-34a7-49d7-8119-09c53336a553" containerName="ssh-known-hosts-edpm-deployment" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.931089 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a053178-34a7-49d7-8119-09c53336a553" containerName="ssh-known-hosts-edpm-deployment" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.931345 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a053178-34a7-49d7-8119-09c53336a553" containerName="ssh-known-hosts-edpm-deployment" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.932072 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.934162 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.934558 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.934879 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.935183 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:00:35 crc kubenswrapper[4612]: I1203 08:00:35.946838 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw"] Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.058195 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bcf7\" (UniqueName: \"kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.058258 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.058305 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.159756 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bcf7\" (UniqueName: \"kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.159910 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.160950 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.170928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.176113 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.222873 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bcf7\" (UniqueName: \"kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vwxdw\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.263497 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:36 crc kubenswrapper[4612]: I1203 08:00:36.835027 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw"] Dec 03 08:00:36 crc kubenswrapper[4612]: W1203 08:00:36.838987 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18e6e5c3_b90b_429e_9b89_c94e3f20ecdd.slice/crio-fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e WatchSource:0}: Error finding container fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e: Status 404 returned error can't find the container with id fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e Dec 03 08:00:37 crc kubenswrapper[4612]: I1203 08:00:37.845239 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" event={"ID":"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd","Type":"ContainerStarted","Data":"14af5c59b3fc9a58778e446134d7a77dc38fee7e4327b020a10399402d9d986d"} Dec 03 08:00:37 crc kubenswrapper[4612]: I1203 08:00:37.845725 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" event={"ID":"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd","Type":"ContainerStarted","Data":"fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e"} Dec 03 08:00:45 crc kubenswrapper[4612]: I1203 08:00:45.368938 4612 scope.go:117] "RemoveContainer" containerID="0313b89a01b1e88c497bb0987bf04b83339efe89aefc5300569173f691408c19" Dec 03 08:00:46 crc kubenswrapper[4612]: I1203 08:00:46.927278 4612 generic.go:334] "Generic (PLEG): container finished" podID="18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" containerID="14af5c59b3fc9a58778e446134d7a77dc38fee7e4327b020a10399402d9d986d" exitCode=0 Dec 03 08:00:46 crc kubenswrapper[4612]: I1203 08:00:46.927363 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" 
event={"ID":"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd","Type":"ContainerDied","Data":"14af5c59b3fc9a58778e446134d7a77dc38fee7e4327b020a10399402d9d986d"} Dec 03 08:00:47 crc kubenswrapper[4612]: I1203 08:00:47.136073 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:00:47 crc kubenswrapper[4612]: I1203 08:00:47.136145 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.341412 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.529077 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key\") pod \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.529170 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory\") pod \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.529265 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bcf7\" (UniqueName: \"kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7\") pod \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\" (UID: \"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd\") " Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.548856 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7" (OuterVolumeSpecName: "kube-api-access-4bcf7") pod "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" (UID: "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd"). InnerVolumeSpecName "kube-api-access-4bcf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.557228 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" (UID: "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.558432 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory" (OuterVolumeSpecName: "inventory") pod "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" (UID: "18e6e5c3-b90b-429e-9b89-c94e3f20ecdd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.631641 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.631669 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.631680 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bcf7\" (UniqueName: \"kubernetes.io/projected/18e6e5c3-b90b-429e-9b89-c94e3f20ecdd-kube-api-access-4bcf7\") on node \"crc\" DevicePath \"\"" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.948393 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" event={"ID":"18e6e5c3-b90b-429e-9b89-c94e3f20ecdd","Type":"ContainerDied","Data":"fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e"} Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.948875 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc78a770b10473fbe57ce9cbb54e705214cf48c2c1ef27b7aba6add95042410e" Dec 03 08:00:48 crc kubenswrapper[4612]: I1203 08:00:48.948464 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vwxdw" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.036910 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf"] Dec 03 08:00:49 crc kubenswrapper[4612]: E1203 08:00:49.037420 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.037442 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.037742 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="18e6e5c3-b90b-429e-9b89-c94e3f20ecdd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.038559 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.041348 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.042599 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.045429 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.048314 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.054652 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf"] Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.140580 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqlgq\" (UniqueName: \"kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.140659 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.140680 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.242396 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqlgq\" (UniqueName: \"kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.242480 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.242503 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: 
\"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.247744 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.252499 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.260235 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqlgq\" (UniqueName: \"kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.356400 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:00:49 crc kubenswrapper[4612]: W1203 08:00:49.879464 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ad5399d_3f69_4f51_bc8c_9245e721bcfd.slice/crio-4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5 WatchSource:0}: Error finding container 4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5: Status 404 returned error can't find the container with id 4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5 Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.882320 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf"] Dec 03 08:00:49 crc kubenswrapper[4612]: I1203 08:00:49.958285 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" event={"ID":"4ad5399d-3f69-4f51-bc8c-9245e721bcfd","Type":"ContainerStarted","Data":"4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5"} Dec 03 08:00:50 crc kubenswrapper[4612]: I1203 08:00:50.983130 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" event={"ID":"4ad5399d-3f69-4f51-bc8c-9245e721bcfd","Type":"ContainerStarted","Data":"a3b4bf4c2ca19b5cb5b81df0ed609719ee0f2b854efad35f74c9b8d15a6eb9f0"} Dec 03 08:00:51 crc kubenswrapper[4612]: I1203 08:00:51.009276 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" podStartSLOduration=1.557205106 podStartE2EDuration="2.009258099s" podCreationTimestamp="2025-12-03 08:00:49 +0000 UTC" firstStartedPulling="2025-12-03 08:00:49.881425895 +0000 UTC m=+2013.054783295" lastFinishedPulling="2025-12-03 08:00:50.333478878 +0000 UTC m=+2013.506836288" observedRunningTime="2025-12-03 08:00:51.001190458 +0000 UTC m=+2014.174547908" 
watchObservedRunningTime="2025-12-03 08:00:51.009258099 +0000 UTC m=+2014.182615509" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.151997 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29412481-m7pjg"] Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.153579 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.180476 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412481-m7pjg"] Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.187854 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hklwg\" (UniqueName: \"kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.187993 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.188027 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.188070 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.289231 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.289286 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.289323 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.291037 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hklwg\" 
(UniqueName: \"kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.296710 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.297271 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.310040 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.312315 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hklwg\" (UniqueName: \"kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg\") pod \"keystone-cron-29412481-m7pjg\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.474233 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:00 crc kubenswrapper[4612]: I1203 08:01:00.938288 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412481-m7pjg"] Dec 03 08:01:01 crc kubenswrapper[4612]: I1203 08:01:01.075585 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412481-m7pjg" event={"ID":"9594636b-c035-4a21-94d7-b54b4e73ef55","Type":"ContainerStarted","Data":"3b890615c19f467974a4bf658248234f3df58c5e27102a513bef1d8906015de6"} Dec 03 08:01:02 crc kubenswrapper[4612]: I1203 08:01:02.084749 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412481-m7pjg" event={"ID":"9594636b-c035-4a21-94d7-b54b4e73ef55","Type":"ContainerStarted","Data":"1c0b0ea9e6f3afaa9721f282209cb24a4b781d2c82a6f54c681eb3f11c826736"} Dec 03 08:01:02 crc kubenswrapper[4612]: I1203 08:01:02.086732 4612 generic.go:334] "Generic (PLEG): container finished" podID="4ad5399d-3f69-4f51-bc8c-9245e721bcfd" containerID="a3b4bf4c2ca19b5cb5b81df0ed609719ee0f2b854efad35f74c9b8d15a6eb9f0" exitCode=0 Dec 03 08:01:02 crc kubenswrapper[4612]: I1203 08:01:02.086766 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" event={"ID":"4ad5399d-3f69-4f51-bc8c-9245e721bcfd","Type":"ContainerDied","Data":"a3b4bf4c2ca19b5cb5b81df0ed609719ee0f2b854efad35f74c9b8d15a6eb9f0"} Dec 03 08:01:02 crc kubenswrapper[4612]: I1203 08:01:02.106207 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29412481-m7pjg" podStartSLOduration=2.106188126 podStartE2EDuration="2.106188126s" podCreationTimestamp="2025-12-03 08:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:01:02.099269053 +0000 UTC m=+2025.272626463" watchObservedRunningTime="2025-12-03 08:01:02.106188126 +0000 UTC m=+2025.279545526" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.476181 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.658931 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory\") pod \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.659228 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqlgq\" (UniqueName: \"kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq\") pod \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.659908 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key\") pod \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\" (UID: \"4ad5399d-3f69-4f51-bc8c-9245e721bcfd\") " Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.669795 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq" (OuterVolumeSpecName: "kube-api-access-wqlgq") pod "4ad5399d-3f69-4f51-bc8c-9245e721bcfd" (UID: "4ad5399d-3f69-4f51-bc8c-9245e721bcfd"). InnerVolumeSpecName "kube-api-access-wqlgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.688365 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4ad5399d-3f69-4f51-bc8c-9245e721bcfd" (UID: "4ad5399d-3f69-4f51-bc8c-9245e721bcfd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.692375 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory" (OuterVolumeSpecName: "inventory") pod "4ad5399d-3f69-4f51-bc8c-9245e721bcfd" (UID: "4ad5399d-3f69-4f51-bc8c-9245e721bcfd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.763319 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqlgq\" (UniqueName: \"kubernetes.io/projected/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-kube-api-access-wqlgq\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.763357 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:03 crc kubenswrapper[4612]: I1203 08:01:03.763370 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ad5399d-3f69-4f51-bc8c-9245e721bcfd-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.116872 4612 generic.go:334] "Generic (PLEG): container finished" podID="9594636b-c035-4a21-94d7-b54b4e73ef55" containerID="1c0b0ea9e6f3afaa9721f282209cb24a4b781d2c82a6f54c681eb3f11c826736" exitCode=0 Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.117004 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412481-m7pjg" event={"ID":"9594636b-c035-4a21-94d7-b54b4e73ef55","Type":"ContainerDied","Data":"1c0b0ea9e6f3afaa9721f282209cb24a4b781d2c82a6f54c681eb3f11c826736"} Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.123092 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" event={"ID":"4ad5399d-3f69-4f51-bc8c-9245e721bcfd","Type":"ContainerDied","Data":"4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5"} Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.123149 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4835c5c69acd1ae56c4c61bff3773db746d64265052fc887d883ed78366c7af5" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.123250 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.221134 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj"] Dec 03 08:01:04 crc kubenswrapper[4612]: E1203 08:01:04.221494 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad5399d-3f69-4f51-bc8c-9245e721bcfd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.221511 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad5399d-3f69-4f51-bc8c-9245e721bcfd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.221688 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ad5399d-3f69-4f51-bc8c-9245e721bcfd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.222316 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228264 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228426 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228474 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228603 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228771 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.228924 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.229281 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.229523 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.234999 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj"] Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.372733 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373130 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373165 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373219 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373520 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373558 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373617 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmbj4\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373642 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373674 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373706 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373730 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373766 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373832 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.373859 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.475582 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.476072 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.476247 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.476406 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.476541 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.477758 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.478009 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmbj4\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.478192 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479545 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479629 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479670 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479695 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: 
\"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479841 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.479878 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.480482 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.481171 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.482222 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.485413 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.487992 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.488205 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.488684 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.489284 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.490404 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.491207 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.492730 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.495539 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.496258 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.499064 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmbj4\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:04 crc kubenswrapper[4612]: I1203 08:01:04.541855 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.067930 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj"] Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.133671 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" event={"ID":"237ab75b-84d9-4bd7-b235-2073221081f2","Type":"ContainerStarted","Data":"79aab788133532e1411b07bf0f9e483445cdadb30c7c6f15182e455c6482b837"} Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.533326 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.709443 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle\") pod \"9594636b-c035-4a21-94d7-b54b4e73ef55\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.709920 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hklwg\" (UniqueName: \"kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg\") pod \"9594636b-c035-4a21-94d7-b54b4e73ef55\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.709970 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys\") pod \"9594636b-c035-4a21-94d7-b54b4e73ef55\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.710010 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data\") pod \"9594636b-c035-4a21-94d7-b54b4e73ef55\" (UID: \"9594636b-c035-4a21-94d7-b54b4e73ef55\") " Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.715647 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9594636b-c035-4a21-94d7-b54b4e73ef55" (UID: "9594636b-c035-4a21-94d7-b54b4e73ef55"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.720055 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg" (OuterVolumeSpecName: "kube-api-access-hklwg") pod "9594636b-c035-4a21-94d7-b54b4e73ef55" (UID: "9594636b-c035-4a21-94d7-b54b4e73ef55"). InnerVolumeSpecName "kube-api-access-hklwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.750195 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9594636b-c035-4a21-94d7-b54b4e73ef55" (UID: "9594636b-c035-4a21-94d7-b54b4e73ef55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.795342 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data" (OuterVolumeSpecName: "config-data") pod "9594636b-c035-4a21-94d7-b54b4e73ef55" (UID: "9594636b-c035-4a21-94d7-b54b4e73ef55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.812740 4612 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.812767 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hklwg\" (UniqueName: \"kubernetes.io/projected/9594636b-c035-4a21-94d7-b54b4e73ef55-kube-api-access-hklwg\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.812778 4612 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:05 crc kubenswrapper[4612]: I1203 08:01:05.812785 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9594636b-c035-4a21-94d7-b54b4e73ef55-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:06 crc kubenswrapper[4612]: I1203 08:01:06.143996 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412481-m7pjg" event={"ID":"9594636b-c035-4a21-94d7-b54b4e73ef55","Type":"ContainerDied","Data":"3b890615c19f467974a4bf658248234f3df58c5e27102a513bef1d8906015de6"} Dec 03 08:01:06 crc kubenswrapper[4612]: I1203 08:01:06.144090 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b890615c19f467974a4bf658248234f3df58c5e27102a513bef1d8906015de6" Dec 03 08:01:06 crc kubenswrapper[4612]: I1203 08:01:06.144126 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412481-m7pjg" Dec 03 08:01:06 crc kubenswrapper[4612]: I1203 08:01:06.145586 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" event={"ID":"237ab75b-84d9-4bd7-b235-2073221081f2","Type":"ContainerStarted","Data":"127363cf4a40c167ac4fabbe2f3364ef7f4357db4928c6f8a33bf4ab58057eb9"} Dec 03 08:01:06 crc kubenswrapper[4612]: I1203 08:01:06.181920 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" podStartSLOduration=1.5873862380000001 podStartE2EDuration="2.181895749s" podCreationTimestamp="2025-12-03 08:01:04 +0000 UTC" firstStartedPulling="2025-12-03 08:01:05.073424619 +0000 UTC m=+2028.246782029" lastFinishedPulling="2025-12-03 08:01:05.66793415 +0000 UTC m=+2028.841291540" observedRunningTime="2025-12-03 08:01:06.176812652 +0000 UTC m=+2029.350170082" watchObservedRunningTime="2025-12-03 08:01:06.181895749 +0000 UTC m=+2029.355253169" Dec 03 08:01:17 crc kubenswrapper[4612]: I1203 08:01:17.135719 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:01:17 crc kubenswrapper[4612]: I1203 08:01:17.136452 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:01:17 crc kubenswrapper[4612]: I1203 08:01:17.136523 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:01:17 crc kubenswrapper[4612]: I1203 08:01:17.137530 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:01:17 crc kubenswrapper[4612]: I1203 08:01:17.137615 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2" gracePeriod=600 Dec 03 08:01:18 crc kubenswrapper[4612]: I1203 08:01:18.265817 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2" exitCode=0 Dec 03 08:01:18 crc kubenswrapper[4612]: I1203 08:01:18.266552 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2"} Dec 03 08:01:18 crc kubenswrapper[4612]: I1203 08:01:18.266589 4612 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"} Dec 03 08:01:18 crc kubenswrapper[4612]: I1203 08:01:18.266614 4612 scope.go:117] "RemoveContainer" containerID="4c615ec15d2d5e143e8bfb397291da04fba3c7dd3a000e6b16ad2d2ad674356b" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.392408 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:01:50 crc kubenswrapper[4612]: E1203 08:01:50.393360 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9594636b-c035-4a21-94d7-b54b4e73ef55" containerName="keystone-cron" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.393375 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="9594636b-c035-4a21-94d7-b54b4e73ef55" containerName="keystone-cron" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.393633 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="9594636b-c035-4a21-94d7-b54b4e73ef55" containerName="keystone-cron" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.395488 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.424417 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.448439 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppnbl\" (UniqueName: \"kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.449110 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.449238 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.551570 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.551808 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " 
pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.551924 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppnbl\" (UniqueName: \"kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.552049 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.552328 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.570923 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppnbl\" (UniqueName: \"kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl\") pod \"certified-operators-5rdp4\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:50 crc kubenswrapper[4612]: I1203 08:01:50.779962 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:01:51 crc kubenswrapper[4612]: I1203 08:01:51.325876 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:01:51 crc kubenswrapper[4612]: I1203 08:01:51.607407 4612 generic.go:334] "Generic (PLEG): container finished" podID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerID="30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075" exitCode=0 Dec 03 08:01:51 crc kubenswrapper[4612]: I1203 08:01:51.607506 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerDied","Data":"30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075"} Dec 03 08:01:51 crc kubenswrapper[4612]: I1203 08:01:51.607663 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerStarted","Data":"10499b5465274ed08cb4050150da81b2e3771c7775f8a3f3a475a7b3ac178ef3"} Dec 03 08:01:52 crc kubenswrapper[4612]: I1203 08:01:52.620554 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerStarted","Data":"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343"} Dec 03 08:01:52 crc kubenswrapper[4612]: I1203 08:01:52.622359 4612 generic.go:334] "Generic (PLEG): container finished" podID="237ab75b-84d9-4bd7-b235-2073221081f2" containerID="127363cf4a40c167ac4fabbe2f3364ef7f4357db4928c6f8a33bf4ab58057eb9" exitCode=0 Dec 03 08:01:52 crc kubenswrapper[4612]: I1203 08:01:52.622413 4612 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" event={"ID":"237ab75b-84d9-4bd7-b235-2073221081f2","Type":"ContainerDied","Data":"127363cf4a40c167ac4fabbe2f3364ef7f4357db4928c6f8a33bf4ab58057eb9"} Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.356507 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436647 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436713 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436755 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436785 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436829 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436904 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436934 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmbj4\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.436986 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") " Dec 03 08:01:54 crc 
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.437091 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") "
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.437115 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") "
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.437199 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") "
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.437225 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") "
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.437281 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle\") pod \"237ab75b-84d9-4bd7-b235-2073221081f2\" (UID: \"237ab75b-84d9-4bd7-b235-2073221081f2\") "
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.443278 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.443560 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.443587 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.444505 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.445578 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.446681 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.446734 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.447428 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.447789 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.449266 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.449331 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.452099 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4" (OuterVolumeSpecName: "kube-api-access-vmbj4") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "kube-api-access-vmbj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.474456 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory" (OuterVolumeSpecName: "inventory") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.474724 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "237ab75b-84d9-4bd7-b235-2073221081f2" (UID: "237ab75b-84d9-4bd7-b235-2073221081f2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540545 4612 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540664 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540734 4612 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540789 4612 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540848 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540902 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.540977 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541084 4612 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541149 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541242 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmbj4\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-kube-api-access-vmbj4\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541314 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/237ab75b-84d9-4bd7-b235-2073221081f2-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541372 4612 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
\"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541540 4612 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.541743 4612 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237ab75b-84d9-4bd7-b235-2073221081f2-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.646919 4612 generic.go:334] "Generic (PLEG): container finished" podID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerID="d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343" exitCode=0 Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.646980 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerDied","Data":"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343"} Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.650031 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" event={"ID":"237ab75b-84d9-4bd7-b235-2073221081f2","Type":"ContainerDied","Data":"79aab788133532e1411b07bf0f9e483445cdadb30c7c6f15182e455c6482b837"} Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.650057 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79aab788133532e1411b07bf0f9e483445cdadb30c7c6f15182e455c6482b837" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.650148 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.865430 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk"] Dec 03 08:01:54 crc kubenswrapper[4612]: E1203 08:01:54.866117 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237ab75b-84d9-4bd7-b235-2073221081f2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.866150 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="237ab75b-84d9-4bd7-b235-2073221081f2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.866517 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="237ab75b-84d9-4bd7-b235-2073221081f2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.867611 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.870540 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.870966 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.871375 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.871648 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.872255 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.876476 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk"] Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.950627 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.950706 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4w2v\" (UniqueName: \"kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.950772 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.950810 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:54 crc kubenswrapper[4612]: I1203 08:01:54.950883 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.052344 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.052790 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.052914 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4w2v\" (UniqueName: \"kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.053094 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.053243 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.053847 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.056778 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.057257 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.057916 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.077634 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4w2v\" (UniqueName: \"kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6ztrk\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.195356 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.661132 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerStarted","Data":"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34"} Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.685514 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5rdp4" podStartSLOduration=2.156844117 podStartE2EDuration="5.685498284s" podCreationTimestamp="2025-12-03 08:01:50 +0000 UTC" firstStartedPulling="2025-12-03 08:01:51.611610146 +0000 UTC m=+2074.784967536" lastFinishedPulling="2025-12-03 08:01:55.140264303 +0000 UTC m=+2078.313621703" observedRunningTime="2025-12-03 08:01:55.682439787 +0000 UTC m=+2078.855797197" watchObservedRunningTime="2025-12-03 08:01:55.685498284 +0000 UTC m=+2078.858855684" Dec 03 08:01:55 crc kubenswrapper[4612]: I1203 08:01:55.820586 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk"] Dec 03 08:01:56 crc kubenswrapper[4612]: I1203 08:01:56.672429 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" event={"ID":"44b9b74b-2985-47c3-aec4-304bfc2d6122","Type":"ContainerStarted","Data":"ddf96e30513bb85fef3c0fcf81841b3be27ee891c7b2d8790ea1b5f7710ae07e"} Dec 03 08:01:56 crc kubenswrapper[4612]: I1203 08:01:56.672831 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" event={"ID":"44b9b74b-2985-47c3-aec4-304bfc2d6122","Type":"ContainerStarted","Data":"aacdb6f7089b75afa547667f1db5ecc135f07dde392fcc4ff6f7959dd0298762"} Dec 03 08:01:56 crc kubenswrapper[4612]: I1203 08:01:56.691458 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" podStartSLOduration=2.160203032 podStartE2EDuration="2.691439572s" podCreationTimestamp="2025-12-03 08:01:54 +0000 UTC" firstStartedPulling="2025-12-03 08:01:55.83387551 +0000 UTC m=+2079.007232910" lastFinishedPulling="2025-12-03 08:01:56.36511204 +0000 UTC m=+2079.538469450" observedRunningTime="2025-12-03 08:01:56.690496988 +0000 UTC m=+2079.863854398" watchObservedRunningTime="2025-12-03 08:01:56.691439572 +0000 UTC m=+2079.864796972" Dec 03 08:02:00 crc kubenswrapper[4612]: I1203 08:02:00.781259 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:00 crc kubenswrapper[4612]: I1203 08:02:00.781834 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:00 crc 
kubenswrapper[4612]: I1203 08:02:00.844778 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:01 crc kubenswrapper[4612]: I1203 08:02:01.822616 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:01 crc kubenswrapper[4612]: I1203 08:02:01.884464 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:02:03 crc kubenswrapper[4612]: I1203 08:02:03.752379 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5rdp4" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="registry-server" containerID="cri-o://2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34" gracePeriod=2 Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.338359 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.453832 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppnbl\" (UniqueName: \"kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl\") pod \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.453999 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content\") pod \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.454029 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities\") pod \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\" (UID: \"c1723d82-b2b1-4a88-8c66-5fe66524d69e\") " Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.455177 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities" (OuterVolumeSpecName: "utilities") pod "c1723d82-b2b1-4a88-8c66-5fe66524d69e" (UID: "c1723d82-b2b1-4a88-8c66-5fe66524d69e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.474583 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl" (OuterVolumeSpecName: "kube-api-access-ppnbl") pod "c1723d82-b2b1-4a88-8c66-5fe66524d69e" (UID: "c1723d82-b2b1-4a88-8c66-5fe66524d69e"). InnerVolumeSpecName "kube-api-access-ppnbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.511872 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1723d82-b2b1-4a88-8c66-5fe66524d69e" (UID: "c1723d82-b2b1-4a88-8c66-5fe66524d69e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.555721 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppnbl\" (UniqueName: \"kubernetes.io/projected/c1723d82-b2b1-4a88-8c66-5fe66524d69e-kube-api-access-ppnbl\") on node \"crc\" DevicePath \"\"" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.555762 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.555772 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1723d82-b2b1-4a88-8c66-5fe66524d69e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.764735 4612 generic.go:334] "Generic (PLEG): container finished" podID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerID="2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34" exitCode=0 Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.764824 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerDied","Data":"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34"} Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.764926 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5rdp4" event={"ID":"c1723d82-b2b1-4a88-8c66-5fe66524d69e","Type":"ContainerDied","Data":"10499b5465274ed08cb4050150da81b2e3771c7775f8a3f3a475a7b3ac178ef3"} Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.764993 4612 scope.go:117] "RemoveContainer" containerID="2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.764858 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5rdp4" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.788404 4612 scope.go:117] "RemoveContainer" containerID="d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.821972 4612 scope.go:117] "RemoveContainer" containerID="30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.838374 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.856662 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5rdp4"] Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.873342 4612 scope.go:117] "RemoveContainer" containerID="2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34" Dec 03 08:02:04 crc kubenswrapper[4612]: E1203 08:02:04.874077 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34\": container with ID starting with 2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34 not found: ID does not exist" containerID="2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.874244 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34"} err="failed to get container status \"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34\": rpc error: code = NotFound desc = could not find container \"2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34\": container with ID starting with 2f623caaed73602a1852c9118692b14b8b7cd3817eed84ee3341e822d36e2e34 not found: ID does not exist" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.874391 4612 scope.go:117] "RemoveContainer" containerID="d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343" Dec 03 08:02:04 crc kubenswrapper[4612]: E1203 08:02:04.874882 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343\": container with ID starting with d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343 not found: ID does not exist" containerID="d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.875040 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343"} err="failed to get container status \"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343\": rpc error: code = NotFound desc = could not find container \"d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343\": container with ID starting with d8fb0aa8a6253c609faee84caf898956c4ec5db35cbeb04f0d8b59f38267f343 not found: ID does not exist" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.875137 4612 scope.go:117] "RemoveContainer" containerID="30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075" Dec 03 08:02:04 crc kubenswrapper[4612]: E1203 08:02:04.875556 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075\": container with ID starting with 30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075 not found: ID does not exist" containerID="30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075" Dec 03 08:02:04 crc kubenswrapper[4612]: I1203 08:02:04.875676 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075"} err="failed to get container status \"30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075\": rpc error: code = NotFound desc = could not find container \"30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075\": container with ID starting with 30bd8d6d6f1302b3a3b1d4c5670e392eaadd9c34b6b62999e9b7b0bb640d4075 not found: ID does not exist" Dec 03 08:02:05 crc kubenswrapper[4612]: I1203 08:02:05.105812 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" path="/var/lib/kubelet/pods/c1723d82-b2b1-4a88-8c66-5fe66524d69e/volumes" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.078254 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:02:41 crc kubenswrapper[4612]: E1203 08:02:41.079399 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="extract-content" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.079420 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="extract-content" Dec 03 08:02:41 crc kubenswrapper[4612]: E1203 08:02:41.079456 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="extract-utilities" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.079467 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="extract-utilities" Dec 03 08:02:41 crc kubenswrapper[4612]: E1203 08:02:41.079528 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="registry-server" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.079540 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="registry-server" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.079845 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1723d82-b2b1-4a88-8c66-5fe66524d69e" containerName="registry-server" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.086378 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.108785 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.240430 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr4vm\" (UniqueName: \"kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.240508 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.240611 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.342417 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.342592 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr4vm\" (UniqueName: \"kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.342656 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.343180 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.343761 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.375779 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dr4vm\" (UniqueName: \"kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm\") pod \"redhat-operators-jqpmv\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.408865 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:41 crc kubenswrapper[4612]: I1203 08:02:41.908326 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:02:41 crc kubenswrapper[4612]: W1203 08:02:41.920182 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8924bf1_cc74_4e33_ad2a_1a136da2e761.slice/crio-44aaf29cf2469f5f0baae1c0b3552c6cad3829256caef05e08d8da3bff9b9439 WatchSource:0}: Error finding container 44aaf29cf2469f5f0baae1c0b3552c6cad3829256caef05e08d8da3bff9b9439: Status 404 returned error can't find the container with id 44aaf29cf2469f5f0baae1c0b3552c6cad3829256caef05e08d8da3bff9b9439 Dec 03 08:02:42 crc kubenswrapper[4612]: I1203 08:02:42.124548 4612 generic.go:334] "Generic (PLEG): container finished" podID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerID="84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98" exitCode=0 Dec 03 08:02:42 crc kubenswrapper[4612]: I1203 08:02:42.124591 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerDied","Data":"84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98"} Dec 03 08:02:42 crc kubenswrapper[4612]: I1203 08:02:42.124616 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerStarted","Data":"44aaf29cf2469f5f0baae1c0b3552c6cad3829256caef05e08d8da3bff9b9439"} Dec 03 08:02:44 crc kubenswrapper[4612]: I1203 08:02:44.148145 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerStarted","Data":"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0"} Dec 03 08:02:48 crc kubenswrapper[4612]: I1203 08:02:48.192810 4612 generic.go:334] "Generic (PLEG): container finished" podID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerID="239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0" exitCode=0 Dec 03 08:02:48 crc kubenswrapper[4612]: I1203 08:02:48.192880 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerDied","Data":"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0"} Dec 03 08:02:49 crc kubenswrapper[4612]: I1203 08:02:49.220754 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerStarted","Data":"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef"} Dec 03 08:02:49 crc kubenswrapper[4612]: I1203 08:02:49.241221 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jqpmv" podStartSLOduration=1.7294722949999999 podStartE2EDuration="8.241203651s" 
podCreationTimestamp="2025-12-03 08:02:41 +0000 UTC" firstStartedPulling="2025-12-03 08:02:42.126300887 +0000 UTC m=+2125.299658287" lastFinishedPulling="2025-12-03 08:02:48.638032203 +0000 UTC m=+2131.811389643" observedRunningTime="2025-12-03 08:02:49.237231252 +0000 UTC m=+2132.410588652" watchObservedRunningTime="2025-12-03 08:02:49.241203651 +0000 UTC m=+2132.414561061" Dec 03 08:02:51 crc kubenswrapper[4612]: I1203 08:02:51.410075 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:51 crc kubenswrapper[4612]: I1203 08:02:51.410533 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:02:52 crc kubenswrapper[4612]: I1203 08:02:52.463141 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jqpmv" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="registry-server" probeResult="failure" output=< Dec 03 08:02:52 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 08:02:52 crc kubenswrapper[4612]: > Dec 03 08:03:01 crc kubenswrapper[4612]: I1203 08:03:01.484644 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:03:01 crc kubenswrapper[4612]: I1203 08:03:01.557860 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:03:01 crc kubenswrapper[4612]: I1203 08:03:01.721112 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.368482 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jqpmv" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="registry-server" containerID="cri-o://7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef" gracePeriod=2 Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.864544 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.919809 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content\") pod \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.919943 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr4vm\" (UniqueName: \"kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm\") pod \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.920009 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities\") pod \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\" (UID: \"a8924bf1-cc74-4e33-ad2a-1a136da2e761\") " Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.920984 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities" (OuterVolumeSpecName: "utilities") pod "a8924bf1-cc74-4e33-ad2a-1a136da2e761" (UID: "a8924bf1-cc74-4e33-ad2a-1a136da2e761"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:03:03 crc kubenswrapper[4612]: I1203 08:03:03.925646 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm" (OuterVolumeSpecName: "kube-api-access-dr4vm") pod "a8924bf1-cc74-4e33-ad2a-1a136da2e761" (UID: "a8924bf1-cc74-4e33-ad2a-1a136da2e761"). InnerVolumeSpecName "kube-api-access-dr4vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.022397 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.022449 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr4vm\" (UniqueName: \"kubernetes.io/projected/a8924bf1-cc74-4e33-ad2a-1a136da2e761-kube-api-access-dr4vm\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.030586 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8924bf1-cc74-4e33-ad2a-1a136da2e761" (UID: "a8924bf1-cc74-4e33-ad2a-1a136da2e761"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.124259 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8924bf1-cc74-4e33-ad2a-1a136da2e761-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.379751 4612 generic.go:334] "Generic (PLEG): container finished" podID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerID="7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef" exitCode=0 Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.379805 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerDied","Data":"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef"} Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.379867 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqpmv" event={"ID":"a8924bf1-cc74-4e33-ad2a-1a136da2e761","Type":"ContainerDied","Data":"44aaf29cf2469f5f0baae1c0b3552c6cad3829256caef05e08d8da3bff9b9439"} Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.379890 4612 scope.go:117] "RemoveContainer" containerID="7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.379823 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jqpmv" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.412392 4612 scope.go:117] "RemoveContainer" containerID="239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.427608 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.438719 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jqpmv"] Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.460018 4612 scope.go:117] "RemoveContainer" containerID="84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.497099 4612 scope.go:117] "RemoveContainer" containerID="7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef" Dec 03 08:03:04 crc kubenswrapper[4612]: E1203 08:03:04.497977 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef\": container with ID starting with 7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef not found: ID does not exist" containerID="7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.498033 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef"} err="failed to get container status \"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef\": rpc error: code = NotFound desc = could not find container \"7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef\": container with ID starting with 7ef28c79f2bdcc4ec6e3aed4c892b58a2d6a1c6b05e225c26433ab45cdc8b0ef not found: ID does not exist" Dec 03 08:03:04 crc 
kubenswrapper[4612]: I1203 08:03:04.498064 4612 scope.go:117] "RemoveContainer" containerID="239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0" Dec 03 08:03:04 crc kubenswrapper[4612]: E1203 08:03:04.498463 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0\": container with ID starting with 239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0 not found: ID does not exist" containerID="239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.498507 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0"} err="failed to get container status \"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0\": rpc error: code = NotFound desc = could not find container \"239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0\": container with ID starting with 239144bf14a5ff6d6104e40fc8714085085e44a6856545606448a9f8ba6611e0 not found: ID does not exist" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.498535 4612 scope.go:117] "RemoveContainer" containerID="84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98" Dec 03 08:03:04 crc kubenswrapper[4612]: E1203 08:03:04.498881 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98\": container with ID starting with 84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98 not found: ID does not exist" containerID="84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98" Dec 03 08:03:04 crc kubenswrapper[4612]: I1203 08:03:04.499015 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98"} err="failed to get container status \"84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98\": rpc error: code = NotFound desc = could not find container \"84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98\": container with ID starting with 84023011ca397a12c340009ef45847b554f1097d58be08387057ef207679dc98 not found: ID does not exist" Dec 03 08:03:05 crc kubenswrapper[4612]: I1203 08:03:05.109991 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" path="/var/lib/kubelet/pods/a8924bf1-cc74-4e33-ad2a-1a136da2e761/volumes" Dec 03 08:03:11 crc kubenswrapper[4612]: I1203 08:03:11.459700 4612 generic.go:334] "Generic (PLEG): container finished" podID="44b9b74b-2985-47c3-aec4-304bfc2d6122" containerID="ddf96e30513bb85fef3c0fcf81841b3be27ee891c7b2d8790ea1b5f7710ae07e" exitCode=0 Dec 03 08:03:11 crc kubenswrapper[4612]: I1203 08:03:11.459814 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" event={"ID":"44b9b74b-2985-47c3-aec4-304bfc2d6122","Type":"ContainerDied","Data":"ddf96e30513bb85fef3c0fcf81841b3be27ee891c7b2d8790ea1b5f7710ae07e"} Dec 03 08:03:12 crc kubenswrapper[4612]: I1203 08:03:12.976823 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.031359 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key\") pod \"44b9b74b-2985-47c3-aec4-304bfc2d6122\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.031443 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4w2v\" (UniqueName: \"kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v\") pod \"44b9b74b-2985-47c3-aec4-304bfc2d6122\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.031628 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0\") pod \"44b9b74b-2985-47c3-aec4-304bfc2d6122\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.031745 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory\") pod \"44b9b74b-2985-47c3-aec4-304bfc2d6122\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.031883 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle\") pod \"44b9b74b-2985-47c3-aec4-304bfc2d6122\" (UID: \"44b9b74b-2985-47c3-aec4-304bfc2d6122\") " Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.040240 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "44b9b74b-2985-47c3-aec4-304bfc2d6122" (UID: "44b9b74b-2985-47c3-aec4-304bfc2d6122"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.040626 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v" (OuterVolumeSpecName: "kube-api-access-m4w2v") pod "44b9b74b-2985-47c3-aec4-304bfc2d6122" (UID: "44b9b74b-2985-47c3-aec4-304bfc2d6122"). InnerVolumeSpecName "kube-api-access-m4w2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.067524 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "44b9b74b-2985-47c3-aec4-304bfc2d6122" (UID: "44b9b74b-2985-47c3-aec4-304bfc2d6122"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.074659 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "44b9b74b-2985-47c3-aec4-304bfc2d6122" (UID: "44b9b74b-2985-47c3-aec4-304bfc2d6122"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.083745 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory" (OuterVolumeSpecName: "inventory") pod "44b9b74b-2985-47c3-aec4-304bfc2d6122" (UID: "44b9b74b-2985-47c3-aec4-304bfc2d6122"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.135164 4612 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.135205 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.135216 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4w2v\" (UniqueName: \"kubernetes.io/projected/44b9b74b-2985-47c3-aec4-304bfc2d6122-kube-api-access-m4w2v\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.135228 4612 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/44b9b74b-2985-47c3-aec4-304bfc2d6122-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.135240 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44b9b74b-2985-47c3-aec4-304bfc2d6122-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.494998 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" event={"ID":"44b9b74b-2985-47c3-aec4-304bfc2d6122","Type":"ContainerDied","Data":"aacdb6f7089b75afa547667f1db5ecc135f07dde392fcc4ff6f7959dd0298762"} Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.495082 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aacdb6f7089b75afa547667f1db5ecc135f07dde392fcc4ff6f7959dd0298762" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.495178 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6ztrk" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.598386 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s"] Dec 03 08:03:13 crc kubenswrapper[4612]: E1203 08:03:13.598920 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="extract-utilities" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599042 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="extract-utilities" Dec 03 08:03:13 crc kubenswrapper[4612]: E1203 08:03:13.599127 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="registry-server" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599198 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="registry-server" Dec 03 08:03:13 crc kubenswrapper[4612]: E1203 08:03:13.599265 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44b9b74b-2985-47c3-aec4-304bfc2d6122" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599323 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="44b9b74b-2985-47c3-aec4-304bfc2d6122" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 08:03:13 crc kubenswrapper[4612]: E1203 08:03:13.599381 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="extract-content" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599434 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="extract-content" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599636 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8924bf1-cc74-4e33-ad2a-1a136da2e761" containerName="registry-server" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.599703 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="44b9b74b-2985-47c3-aec4-304bfc2d6122" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.600389 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.604493 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.604758 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.605039 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.606029 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.606179 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.606834 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.625286 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s"] Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746043 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746176 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746274 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746311 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746424 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgw6w\" 
(UniqueName: \"kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.746731 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.848734 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.849008 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.849117 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.849255 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgw6w\" (UniqueName: \"kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.849414 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.849918 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 
08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.854775 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.855739 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.856455 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.856496 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.856941 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.872319 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgw6w\" (UniqueName: \"kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:13 crc kubenswrapper[4612]: I1203 08:03:13.918236 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:03:14 crc kubenswrapper[4612]: I1203 08:03:14.448888 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s"] Dec 03 08:03:14 crc kubenswrapper[4612]: I1203 08:03:14.510375 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" event={"ID":"fe4761e2-fbfe-473c-bc56-fafd2d11559b","Type":"ContainerStarted","Data":"a8681b19b048941fa9b27c565fde92a386b6e16176ac6b479775d66b27da2b55"} Dec 03 08:03:15 crc kubenswrapper[4612]: I1203 08:03:15.520277 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" event={"ID":"fe4761e2-fbfe-473c-bc56-fafd2d11559b","Type":"ContainerStarted","Data":"da1a43f0b5ee4cd9f3880279eb6e7944fe2e9021ad87b53f41364999a772a06d"} Dec 03 08:03:15 crc kubenswrapper[4612]: I1203 08:03:15.551246 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" podStartSLOduration=2.011107697 podStartE2EDuration="2.551220528s" podCreationTimestamp="2025-12-03 08:03:13 +0000 UTC" firstStartedPulling="2025-12-03 08:03:14.464179564 +0000 UTC m=+2157.637536974" lastFinishedPulling="2025-12-03 08:03:15.004292365 +0000 UTC m=+2158.177649805" observedRunningTime="2025-12-03 08:03:15.542696175 +0000 UTC m=+2158.716053585" watchObservedRunningTime="2025-12-03 08:03:15.551220528 +0000 UTC m=+2158.724577958" Dec 03 08:03:17 crc kubenswrapper[4612]: I1203 08:03:17.135828 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:03:17 crc kubenswrapper[4612]: I1203 08:03:17.136134 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.221920 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.224797 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.247278 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.347034 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wcvx\" (UniqueName: \"kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.347090 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.347121 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.448443 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wcvx\" (UniqueName: \"kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.448709 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.448740 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.449378 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.449431 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.472931 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6wcvx\" (UniqueName: \"kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx\") pod \"redhat-marketplace-pgttm\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:27 crc kubenswrapper[4612]: I1203 08:03:27.594666 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:28 crc kubenswrapper[4612]: I1203 08:03:28.073620 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:28 crc kubenswrapper[4612]: I1203 08:03:28.645551 4612 generic.go:334] "Generic (PLEG): container finished" podID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerID="adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7" exitCode=0 Dec 03 08:03:28 crc kubenswrapper[4612]: I1203 08:03:28.645819 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerDied","Data":"adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7"} Dec 03 08:03:28 crc kubenswrapper[4612]: I1203 08:03:28.645929 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerStarted","Data":"4f6c4c237925630e0e61f4f606c0b13aafcab46dfcc6bc97184e6662f5feed8d"} Dec 03 08:03:29 crc kubenswrapper[4612]: I1203 08:03:29.658169 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerStarted","Data":"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa"} Dec 03 08:03:30 crc kubenswrapper[4612]: I1203 08:03:30.672917 4612 generic.go:334] "Generic (PLEG): container finished" podID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerID="c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa" exitCode=0 Dec 03 08:03:30 crc kubenswrapper[4612]: I1203 08:03:30.673196 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerDied","Data":"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa"} Dec 03 08:03:31 crc kubenswrapper[4612]: I1203 08:03:31.683088 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerStarted","Data":"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e"} Dec 03 08:03:31 crc kubenswrapper[4612]: I1203 08:03:31.711558 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pgttm" podStartSLOduration=2.152807062 podStartE2EDuration="4.711543411s" podCreationTimestamp="2025-12-03 08:03:27 +0000 UTC" firstStartedPulling="2025-12-03 08:03:28.648646888 +0000 UTC m=+2171.822004288" lastFinishedPulling="2025-12-03 08:03:31.207383207 +0000 UTC m=+2174.380740637" observedRunningTime="2025-12-03 08:03:31.704251989 +0000 UTC m=+2174.877609399" watchObservedRunningTime="2025-12-03 08:03:31.711543411 +0000 UTC m=+2174.884900811" Dec 03 08:03:37 crc kubenswrapper[4612]: I1203 08:03:37.594812 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:37 crc kubenswrapper[4612]: I1203 08:03:37.595430 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:37 crc kubenswrapper[4612]: I1203 08:03:37.684994 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:37 crc kubenswrapper[4612]: I1203 08:03:37.793016 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:37 crc kubenswrapper[4612]: I1203 08:03:37.936423 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:39 crc kubenswrapper[4612]: I1203 08:03:39.753667 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pgttm" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="registry-server" containerID="cri-o://c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e" gracePeriod=2 Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.270884 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.294181 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities\") pod \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.294401 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wcvx\" (UniqueName: \"kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx\") pod \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.294491 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content\") pod \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\" (UID: \"24f49f4a-0391-42b5-9ebb-a57ea3c175be\") " Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.295386 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities" (OuterVolumeSpecName: "utilities") pod "24f49f4a-0391-42b5-9ebb-a57ea3c175be" (UID: "24f49f4a-0391-42b5-9ebb-a57ea3c175be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.300368 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx" (OuterVolumeSpecName: "kube-api-access-6wcvx") pod "24f49f4a-0391-42b5-9ebb-a57ea3c175be" (UID: "24f49f4a-0391-42b5-9ebb-a57ea3c175be"). InnerVolumeSpecName "kube-api-access-6wcvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.327587 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24f49f4a-0391-42b5-9ebb-a57ea3c175be" (UID: "24f49f4a-0391-42b5-9ebb-a57ea3c175be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.397519 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.397564 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24f49f4a-0391-42b5-9ebb-a57ea3c175be-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.397591 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wcvx\" (UniqueName: \"kubernetes.io/projected/24f49f4a-0391-42b5-9ebb-a57ea3c175be-kube-api-access-6wcvx\") on node \"crc\" DevicePath \"\"" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.764408 4612 generic.go:334] "Generic (PLEG): container finished" podID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerID="c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e" exitCode=0 Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.764789 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerDied","Data":"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e"} Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.764837 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pgttm" event={"ID":"24f49f4a-0391-42b5-9ebb-a57ea3c175be","Type":"ContainerDied","Data":"4f6c4c237925630e0e61f4f606c0b13aafcab46dfcc6bc97184e6662f5feed8d"} Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.764862 4612 scope.go:117] "RemoveContainer" containerID="c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.765113 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pgttm" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.803426 4612 scope.go:117] "RemoveContainer" containerID="c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.806515 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.820980 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pgttm"] Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.836586 4612 scope.go:117] "RemoveContainer" containerID="adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.890369 4612 scope.go:117] "RemoveContainer" containerID="c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e" Dec 03 08:03:40 crc kubenswrapper[4612]: E1203 08:03:40.890758 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e\": container with ID starting with c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e not found: ID does not exist" containerID="c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.890814 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e"} err="failed to get container status \"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e\": rpc error: code = NotFound desc = could not find container \"c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e\": container with ID starting with c64b86bde0b0fef652281bb378c0a49e9507d354a60174638dc740b5d5c4d05e not found: ID does not exist" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.890845 4612 scope.go:117] "RemoveContainer" containerID="c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa" Dec 03 08:03:40 crc kubenswrapper[4612]: E1203 08:03:40.891370 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa\": container with ID starting with c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa not found: ID does not exist" containerID="c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.891400 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa"} err="failed to get container status \"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa\": rpc error: code = NotFound desc = could not find container \"c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa\": container with ID starting with c24e244a42c6cee7e2b62602125f02345a4a2772b4f4fcf6308579a866a13efa not found: ID does not exist" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.891422 4612 scope.go:117] "RemoveContainer" containerID="adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7" Dec 03 08:03:40 crc kubenswrapper[4612]: E1203 08:03:40.891703 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7\": container with ID starting with adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7 not found: ID does not exist" containerID="adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7" Dec 03 08:03:40 crc kubenswrapper[4612]: I1203 08:03:40.891735 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7"} err="failed to get container status \"adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7\": rpc error: code = NotFound desc = could not find container \"adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7\": container with ID starting with adc66249894556654b0ded431ae9a7b792bb95a891470742e184b9ba4d684ac7 not found: ID does not exist" Dec 03 08:03:41 crc kubenswrapper[4612]: I1203 08:03:41.100517 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" path="/var/lib/kubelet/pods/24f49f4a-0391-42b5-9ebb-a57ea3c175be/volumes" Dec 03 08:03:47 crc kubenswrapper[4612]: I1203 08:03:47.135737 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:03:47 crc kubenswrapper[4612]: I1203 08:03:47.136394 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:04:13 crc kubenswrapper[4612]: I1203 08:04:13.193366 4612 generic.go:334] "Generic (PLEG): container finished" podID="fe4761e2-fbfe-473c-bc56-fafd2d11559b" containerID="da1a43f0b5ee4cd9f3880279eb6e7944fe2e9021ad87b53f41364999a772a06d" exitCode=0 Dec 03 08:04:13 crc kubenswrapper[4612]: I1203 08:04:13.193448 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" event={"ID":"fe4761e2-fbfe-473c-bc56-fafd2d11559b","Type":"ContainerDied","Data":"da1a43f0b5ee4cd9f3880279eb6e7944fe2e9021ad87b53f41364999a772a06d"} Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.617706 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711176 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711233 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711338 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgw6w\" (UniqueName: \"kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711364 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711413 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.711437 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.717264 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.717533 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w" (OuterVolumeSpecName: "kube-api-access-wgw6w") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "kube-api-access-wgw6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:04:14 crc kubenswrapper[4612]: E1203 08:04:14.749310 4612 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory podName:fe4761e2-fbfe-473c-bc56-fafd2d11559b nodeName:}" failed. 
No retries permitted until 2025-12-03 08:04:15.249279954 +0000 UTC m=+2218.422637364 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b") : error deleting /var/lib/kubelet/pods/fe4761e2-fbfe-473c-bc56-fafd2d11559b/volume-subpaths: remove /var/lib/kubelet/pods/fe4761e2-fbfe-473c-bc56-fafd2d11559b/volume-subpaths: no such file or directory Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.749390 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.749449 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.752791 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.813217 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgw6w\" (UniqueName: \"kubernetes.io/projected/fe4761e2-fbfe-473c-bc56-fafd2d11559b-kube-api-access-wgw6w\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.813250 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.813263 4612 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.813276 4612 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:14 crc kubenswrapper[4612]: I1203 08:04:14.813291 4612 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.222027 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" event={"ID":"fe4761e2-fbfe-473c-bc56-fafd2d11559b","Type":"ContainerDied","Data":"a8681b19b048941fa9b27c565fde92a386b6e16176ac6b479775d66b27da2b55"} Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.222108 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8681b19b048941fa9b27c565fde92a386b6e16176ac6b479775d66b27da2b55" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.222220 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.323501 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") pod \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\" (UID: \"fe4761e2-fbfe-473c-bc56-fafd2d11559b\") " Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.349399 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv"] Dec 03 08:04:15 crc kubenswrapper[4612]: E1203 08:04:15.349809 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="registry-server" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.349828 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="registry-server" Dec 03 08:04:15 crc kubenswrapper[4612]: E1203 08:04:15.349867 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="extract-utilities" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.349877 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="extract-utilities" Dec 03 08:04:15 crc kubenswrapper[4612]: E1203 08:04:15.349894 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="extract-content" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.349903 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="extract-content" Dec 03 08:04:15 crc kubenswrapper[4612]: E1203 08:04:15.349921 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe4761e2-fbfe-473c-bc56-fafd2d11559b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.349930 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe4761e2-fbfe-473c-bc56-fafd2d11559b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.350174 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="24f49f4a-0391-42b5-9ebb-a57ea3c175be" containerName="registry-server" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.350208 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe4761e2-fbfe-473c-bc56-fafd2d11559b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.350927 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.355850 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.360395 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv"] Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.360568 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory" (OuterVolumeSpecName: "inventory") pod "fe4761e2-fbfe-473c-bc56-fafd2d11559b" (UID: "fe4761e2-fbfe-473c-bc56-fafd2d11559b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425662 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425723 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425754 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lnsn\" (UniqueName: \"kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425847 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425874 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.425959 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe4761e2-fbfe-473c-bc56-fafd2d11559b-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.527470 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.527531 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.527585 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.527614 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.527639 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lnsn\" (UniqueName: \"kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.530985 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.531483 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.532837 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.546586 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.546871 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lnsn\" (UniqueName: \"kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:15 crc kubenswrapper[4612]: I1203 08:04:15.703323 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" Dec 03 08:04:16 crc kubenswrapper[4612]: I1203 08:04:16.071039 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv"] Dec 03 08:04:16 crc kubenswrapper[4612]: I1203 08:04:16.234295 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" event={"ID":"7f0ab99d-079d-4eda-8308-33f1a44d5c3b","Type":"ContainerStarted","Data":"d54ed20e86ee370b9871dc68d88303212bf23001751c3f0e7c031ee638398ab4"} Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.136566 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.136867 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.136924 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.137675 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.137758 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb" gracePeriod=600 Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.243763 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" event={"ID":"7f0ab99d-079d-4eda-8308-33f1a44d5c3b","Type":"ContainerStarted","Data":"40a1da9601fbb7c8446fedf5972bcf588c0a44ab5e8f933471c8b87b18d85749"} Dec 03 08:04:17 crc kubenswrapper[4612]: E1203 08:04:17.264749 4612 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:04:17 crc kubenswrapper[4612]: I1203 08:04:17.281670 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" podStartSLOduration=1.7276820750000002 podStartE2EDuration="2.281653314s" podCreationTimestamp="2025-12-03 08:04:15 +0000 UTC" firstStartedPulling="2025-12-03 08:04:16.079028361 +0000 UTC m=+2219.252385771" lastFinishedPulling="2025-12-03 08:04:16.63299961 +0000 UTC m=+2219.806357010" observedRunningTime="2025-12-03 08:04:17.278882515 +0000 UTC m=+2220.452239915" watchObservedRunningTime="2025-12-03 08:04:17.281653314 +0000 UTC m=+2220.455010714" Dec 03 08:04:18 crc kubenswrapper[4612]: I1203 08:04:18.253087 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb" exitCode=0 Dec 03 08:04:18 crc kubenswrapper[4612]: I1203 08:04:18.253328 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"} Dec 03 08:04:18 crc kubenswrapper[4612]: I1203 08:04:18.253546 4612 scope.go:117] "RemoveContainer" containerID="f927977d7c370ea006286e23ee308504eb691fc6127f52fc2dd3867d9ba39ea2" Dec 03 08:04:18 crc kubenswrapper[4612]: I1203 08:04:18.254592 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb" Dec 03 08:04:18 crc kubenswrapper[4612]: E1203 08:04:18.255074 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:04:29 crc kubenswrapper[4612]: I1203 08:04:29.090777 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb" Dec 03 08:04:29 crc kubenswrapper[4612]: E1203 08:04:29.091847 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:04:40 crc kubenswrapper[4612]: I1203 08:04:40.093192 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb" Dec 03 08:04:40 crc kubenswrapper[4612]: E1203 08:04:40.095813 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:04:51 crc kubenswrapper[4612]: I1203 08:04:51.097282 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:04:51 crc kubenswrapper[4612]: E1203 08:04:51.098436 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:05:02 crc kubenswrapper[4612]: I1203 08:05:02.089287 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:05:02 crc kubenswrapper[4612]: E1203 08:05:02.090379 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:05:13 crc kubenswrapper[4612]: I1203 08:05:13.091672 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:05:13 crc kubenswrapper[4612]: E1203 08:05:13.092545 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:05:28 crc kubenswrapper[4612]: I1203 08:05:28.090273 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:05:28 crc kubenswrapper[4612]: E1203 08:05:28.091449 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:05:43 crc kubenswrapper[4612]: I1203 08:05:43.091057 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:05:43 crc kubenswrapper[4612]: E1203 08:05:43.092415 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:05:55 crc kubenswrapper[4612]: I1203 08:05:55.090054 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:05:55 crc kubenswrapper[4612]: E1203 08:05:55.091000 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:06:09 crc kubenswrapper[4612]: I1203 08:06:09.091932 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:06:09 crc kubenswrapper[4612]: E1203 08:06:09.093055 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:06:23 crc kubenswrapper[4612]: I1203 08:06:23.090437 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:06:23 crc kubenswrapper[4612]: E1203 08:06:23.091528 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:06:35 crc kubenswrapper[4612]: I1203 08:06:35.091484 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:06:35 crc kubenswrapper[4612]: E1203 08:06:35.092824 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:06:49 crc kubenswrapper[4612]: I1203 08:06:49.089908 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:06:49 crc kubenswrapper[4612]: E1203 08:06:49.091088 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:07:00 crc kubenswrapper[4612]: I1203 08:07:00.090118 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:07:00 crc kubenswrapper[4612]: E1203 08:07:00.091540 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:07:14 crc kubenswrapper[4612]: I1203 08:07:14.089096 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:07:14 crc kubenswrapper[4612]: E1203 08:07:14.090191 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:07:25 crc kubenswrapper[4612]: I1203 08:07:25.090104 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:07:25 crc kubenswrapper[4612]: E1203 08:07:25.091308 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:07:38 crc kubenswrapper[4612]: I1203 08:07:38.089927 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:07:38 crc kubenswrapper[4612]: E1203 08:07:38.091234 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:07:51 crc kubenswrapper[4612]: I1203 08:07:51.089464 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:07:51 crc kubenswrapper[4612]: E1203 08:07:51.090367 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:08:06 crc kubenswrapper[4612]: I1203 08:08:06.089767 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:08:06 crc kubenswrapper[4612]: E1203 08:08:06.090872 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:08:20 crc kubenswrapper[4612]: I1203 08:08:20.090990 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:08:20 crc kubenswrapper[4612]: E1203 08:08:20.091775 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:08:35 crc kubenswrapper[4612]: I1203 08:08:35.090339 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:08:35 crc kubenswrapper[4612]: E1203 08:08:35.091137 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:08:48 crc kubenswrapper[4612]: I1203 08:08:48.089495 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:08:48 crc kubenswrapper[4612]: E1203 08:08:48.090118 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
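[editor's note] The repeating "back-off 5m0s" pairs above are the kubelet's CrashLoopBackOff restart throttle: the delay roughly doubles per consecutive crash from about 10s until it saturates at the 5m ceiling, which is why the pod is retried only every ~11-15s of sync-loop polling but not actually restarted. A minimal Go sketch of that capped-exponential-backoff pattern (illustrative only, not kubelet's actual implementation):

package main

import (
	"fmt"
	"time"
)

// backoffDelay mimics the shape of kubelet's container restart backoff:
// start small, double per consecutive failure, clamp at a ceiling.
func backoffDelay(restarts int) time.Duration {
	const (
		initial  = 10 * time.Second
		maxDelay = 5 * time.Minute // the "back-off 5m0s" seen in the log
	)
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoffDelay(r))
	}
}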
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.020791 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.023316 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.054569 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.175830 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.176246 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxdv5\" (UniqueName: \"kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.176291 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.277628 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxdv5\" (UniqueName: \"kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.277678 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.277733 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.278911 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.279360 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.309793 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxdv5\" (UniqueName: \"kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5\") pod \"community-operators-zlqzb\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") " pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.358653 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.757446 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:08:57 crc kubenswrapper[4612]: I1203 08:08:57.801904 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerStarted","Data":"fe0779f6daa46b17c3a809fe90836d61e02c311afd7128faaf489345090cd8a9"}
Dec 03 08:08:58 crc kubenswrapper[4612]: I1203 08:08:58.814108 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerDied","Data":"20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a"}
Dec 03 08:08:58 crc kubenswrapper[4612]: I1203 08:08:58.814219 4612 generic.go:334] "Generic (PLEG): container finished" podID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerID="20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a" exitCode=0
Dec 03 08:08:58 crc kubenswrapper[4612]: I1203 08:08:58.816987 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 08:08:59 crc kubenswrapper[4612]: I1203 08:08:59.091817 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:08:59 crc kubenswrapper[4612]: E1203 08:08:59.092299 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:08:59 crc kubenswrapper[4612]: I1203 08:08:59.826071 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerStarted","Data":"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"}
Dec 03 08:09:00 crc kubenswrapper[4612]: I1203 08:09:00.835648 4612 generic.go:334] "Generic (PLEG): container finished" podID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerID="51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16" exitCode=0
Dec 03 08:09:00 crc kubenswrapper[4612]: I1203 08:09:00.835695 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerDied","Data":"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"}
Dec 03 08:09:01 crc kubenswrapper[4612]: I1203 08:09:01.844435 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerStarted","Data":"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"}
Dec 03 08:09:01 crc kubenswrapper[4612]: I1203 08:09:01.861677 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zlqzb" podStartSLOduration=3.30204826 podStartE2EDuration="5.86166377s" podCreationTimestamp="2025-12-03 08:08:56 +0000 UTC" firstStartedPulling="2025-12-03 08:08:58.816598746 +0000 UTC m=+2501.989956146" lastFinishedPulling="2025-12-03 08:09:01.376214216 +0000 UTC m=+2504.549571656" observedRunningTime="2025-12-03 08:09:01.857761692 +0000 UTC m=+2505.031119092" watchObservedRunningTime="2025-12-03 08:09:01.86166377 +0000 UTC m=+2505.035021170"
Dec 03 08:09:07 crc kubenswrapper[4612]: I1203 08:09:07.359399 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:07 crc kubenswrapper[4612]: I1203 08:09:07.361684 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:07 crc kubenswrapper[4612]: I1203 08:09:07.466354 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:07 crc kubenswrapper[4612]: I1203 08:09:07.959576 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:08 crc kubenswrapper[4612]: I1203 08:09:08.028709 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:09:09 crc kubenswrapper[4612]: I1203 08:09:09.919639 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zlqzb" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="registry-server" containerID="cri-o://4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e" gracePeriod=2
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.425261 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.577532 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities\") pod \"e37abb80-69cc-4089-9261-6af9a7fdad42\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") "
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.578028 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxdv5\" (UniqueName: \"kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5\") pod \"e37abb80-69cc-4089-9261-6af9a7fdad42\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") "
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.578128 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content\") pod \"e37abb80-69cc-4089-9261-6af9a7fdad42\" (UID: \"e37abb80-69cc-4089-9261-6af9a7fdad42\") "
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.578411 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities" (OuterVolumeSpecName: "utilities") pod "e37abb80-69cc-4089-9261-6af9a7fdad42" (UID: "e37abb80-69cc-4089-9261-6af9a7fdad42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.578847 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.584499 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5" (OuterVolumeSpecName: "kube-api-access-sxdv5") pod "e37abb80-69cc-4089-9261-6af9a7fdad42" (UID: "e37abb80-69cc-4089-9261-6af9a7fdad42"). InnerVolumeSpecName "kube-api-access-sxdv5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.635703 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e37abb80-69cc-4089-9261-6af9a7fdad42" (UID: "e37abb80-69cc-4089-9261-6af9a7fdad42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.681090 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxdv5\" (UniqueName: \"kubernetes.io/projected/e37abb80-69cc-4089-9261-6af9a7fdad42-kube-api-access-sxdv5\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.681142 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e37abb80-69cc-4089-9261-6af9a7fdad42-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.933659 4612 generic.go:334] "Generic (PLEG): container finished" podID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerID="4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e" exitCode=0
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.933730 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerDied","Data":"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"}
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.933774 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlqzb" event={"ID":"e37abb80-69cc-4089-9261-6af9a7fdad42","Type":"ContainerDied","Data":"fe0779f6daa46b17c3a809fe90836d61e02c311afd7128faaf489345090cd8a9"}
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.933804 4612 scope.go:117] "RemoveContainer" containerID="4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.934070 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlqzb"
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.971405 4612 scope.go:117] "RemoveContainer" containerID="51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"
Dec 03 08:09:10 crc kubenswrapper[4612]: I1203 08:09:10.995017 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.006476 4612 scope.go:117] "RemoveContainer" containerID="20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.015250 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zlqzb"]
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.061709 4612 scope.go:117] "RemoveContainer" containerID="4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"
Dec 03 08:09:11 crc kubenswrapper[4612]: E1203 08:09:11.062448 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e\": container with ID starting with 4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e not found: ID does not exist" containerID="4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.062493 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e"} err="failed to get container status \"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e\": rpc error: code = NotFound desc = could not find container \"4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e\": container with ID starting with 4546847ba327990441b56304c0414af016fbdfe6e75a5f56270459a5f82aab5e not found: ID does not exist"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.062526 4612 scope.go:117] "RemoveContainer" containerID="51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"
Dec 03 08:09:11 crc kubenswrapper[4612]: E1203 08:09:11.063040 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16\": container with ID starting with 51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16 not found: ID does not exist" containerID="51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.063105 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16"} err="failed to get container status \"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16\": rpc error: code = NotFound desc = could not find container \"51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16\": container with ID starting with 51274114c00f111256f9cf1c1f89e98d9265fee23a62be08e07e841e1bda8a16 not found: ID does not exist"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.063150 4612 scope.go:117] "RemoveContainer" containerID="20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a"
Dec 03 08:09:11 crc kubenswrapper[4612]: E1203 08:09:11.064003 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a\": container with ID starting with 20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a not found: ID does not exist" containerID="20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.064116 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a"} err="failed to get container status \"20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a\": rpc error: code = NotFound desc = could not find container \"20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a\": container with ID starting with 20d97e36f4548d52e38bc48052d0d9f6a6253b0171ed207b28fd590fc049891a not found: ID does not exist"
Dec 03 08:09:11 crc kubenswrapper[4612]: I1203 08:09:11.109289 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" path="/var/lib/kubelet/pods/e37abb80-69cc-4089-9261-6af9a7fdad42/volumes"
Dec 03 08:09:13 crc kubenswrapper[4612]: I1203 08:09:13.090245 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:09:13 crc kubenswrapper[4612]: E1203 08:09:13.091117 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:09:17 crc kubenswrapper[4612]: I1203 08:09:17.000465 4612 generic.go:334] "Generic (PLEG): container finished" podID="7f0ab99d-079d-4eda-8308-33f1a44d5c3b" containerID="40a1da9601fbb7c8446fedf5972bcf588c0a44ab5e8f933471c8b87b18d85749" exitCode=0
Dec 03 08:09:17 crc kubenswrapper[4612]: I1203 08:09:17.000742 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" event={"ID":"7f0ab99d-079d-4eda-8308-33f1a44d5c3b","Type":"ContainerDied","Data":"40a1da9601fbb7c8446fedf5972bcf588c0a44ab5e8f933471c8b87b18d85749"}
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.420585 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv"
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.578809 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle\") pod \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") "
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.578869 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key\") pod \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") "
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.578975 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory\") pod \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") "
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.579127 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0\") pod \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") "
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.579196 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lnsn\" (UniqueName: \"kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn\") pod \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\" (UID: \"7f0ab99d-079d-4eda-8308-33f1a44d5c3b\") "
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.584196 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7f0ab99d-079d-4eda-8308-33f1a44d5c3b" (UID: "7f0ab99d-079d-4eda-8308-33f1a44d5c3b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.588097 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn" (OuterVolumeSpecName: "kube-api-access-6lnsn") pod "7f0ab99d-079d-4eda-8308-33f1a44d5c3b" (UID: "7f0ab99d-079d-4eda-8308-33f1a44d5c3b"). InnerVolumeSpecName "kube-api-access-6lnsn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.606419 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "7f0ab99d-079d-4eda-8308-33f1a44d5c3b" (UID: "7f0ab99d-079d-4eda-8308-33f1a44d5c3b"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.613473 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7f0ab99d-079d-4eda-8308-33f1a44d5c3b" (UID: "7f0ab99d-079d-4eda-8308-33f1a44d5c3b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.616065 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory" (OuterVolumeSpecName: "inventory") pod "7f0ab99d-079d-4eda-8308-33f1a44d5c3b" (UID: "7f0ab99d-079d-4eda-8308-33f1a44d5c3b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.681471 4612 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.681721 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.681732 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.681741 4612 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:18 crc kubenswrapper[4612]: I1203 08:09:18.681752 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lnsn\" (UniqueName: \"kubernetes.io/projected/7f0ab99d-079d-4eda-8308-33f1a44d5c3b-kube-api-access-6lnsn\") on node \"crc\" DevicePath \"\""
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.025936 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv" event={"ID":"7f0ab99d-079d-4eda-8308-33f1a44d5c3b","Type":"ContainerDied","Data":"d54ed20e86ee370b9871dc68d88303212bf23001751c3f0e7c031ee638398ab4"}
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.026432 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d54ed20e86ee370b9871dc68d88303212bf23001751c3f0e7c031ee638398ab4"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.026074 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.140522 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"]
Dec 03 08:09:19 crc kubenswrapper[4612]: E1203 08:09:19.141001 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="extract-content"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141020 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="extract-content"
Dec 03 08:09:19 crc kubenswrapper[4612]: E1203 08:09:19.141037 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="extract-utilities"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141046 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="extract-utilities"
Dec 03 08:09:19 crc kubenswrapper[4612]: E1203 08:09:19.141075 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f0ab99d-079d-4eda-8308-33f1a44d5c3b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141084 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f0ab99d-079d-4eda-8308-33f1a44d5c3b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:09:19 crc kubenswrapper[4612]: E1203 08:09:19.141098 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="registry-server"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141105 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="registry-server"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141326 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f0ab99d-079d-4eda-8308-33f1a44d5c3b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.141344 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="registry-server"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.142087 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
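[editor's note] When slicing a CI artifact like this one, it helps to split each journald line back into its klog fields (severity, date, time, source location, structured message), exactly the "I1203 08:09:19.141020 4612 state_mem.go:107] ..." shape visible above. A small, assumed Go helper for that; the regexp is a reading convenience for this log, not part of kubelet:

package main

import (
	"fmt"
	"regexp"
)

// klogLine captures: severity (I/W/E), MMDD date, wall-clock time,
// source file:line, and the remainder of the message.
var klogLine = regexp.MustCompile(
	`([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+\d+ ([\w.]+:\d+)\] (.*)`)

func main() {
	line := `I1203 08:09:19.141020 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e37abb80-69cc-4089-9261-6af9a7fdad42" containerName="extract-content"`
	if m := klogLine.FindStringSubmatch(line); m != nil {
		fmt.Printf("severity=%s date=%s time=%s src=%s\nmsg=%s\n",
			m[1], m[2], m[3], m[4], m[5])
	}
}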
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.147286 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.147523 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.148111 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.148222 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.148338 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.148442 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.148647 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.155369 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"]
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291434 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291486 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291565 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291640 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291691 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291784 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291833 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291870 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.291893 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw7mt\" (UniqueName: \"kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394180 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394462 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394607 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394727 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394839 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.394918 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw7mt\" (UniqueName: \"kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.395010 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.395084 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.395264 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.396060 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.400815 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.400847 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.404980 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.405296 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.405656 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.408384 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.416793 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.423704 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw7mt\" (UniqueName: \"kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sc9jx\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:19 crc kubenswrapper[4612]: I1203 08:09:19.481050 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:09:20 crc kubenswrapper[4612]: I1203 08:09:20.053163 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"]
Dec 03 08:09:21 crc kubenswrapper[4612]: I1203 08:09:21.042106 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx" event={"ID":"f9b6eb07-a99d-4365-b819-81f008e2018d","Type":"ContainerStarted","Data":"57adc78234b440e1621e8e55570baf08888b94ca7a7c01d058d98c9060d27baa"}
Dec 03 08:09:22 crc kubenswrapper[4612]: I1203 08:09:22.066412 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx" event={"ID":"f9b6eb07-a99d-4365-b819-81f008e2018d","Type":"ContainerStarted","Data":"d70ffbf975100c81e309987b06c612c474f2349cf7648c5c9e5b2d24bfde3752"}
Dec 03 08:09:22 crc kubenswrapper[4612]: I1203 08:09:22.089617 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx" podStartSLOduration=2.359049487 podStartE2EDuration="3.089586897s" podCreationTimestamp="2025-12-03 08:09:19 +0000 UTC" firstStartedPulling="2025-12-03 08:09:20.066189864 +0000 UTC m=+2523.239547264" lastFinishedPulling="2025-12-03 08:09:20.796727264 +0000 UTC m=+2523.970084674" observedRunningTime="2025-12-03 08:09:22.084829078 +0000 UTC m=+2525.258186488" watchObservedRunningTime="2025-12-03 08:09:22.089586897 +0000 UTC m=+2525.262944307"
Dec 03 08:09:25 crc kubenswrapper[4612]: I1203 08:09:25.091402 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:09:26 crc kubenswrapper[4612]: I1203 08:09:26.104365 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005"}
Dec 03 08:11:47 crc kubenswrapper[4612]: I1203 08:11:47.136408 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:11:47 crc kubenswrapper[4612]: I1203 08:11:47.136963 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 08:12:17 crc kubenswrapper[4612]: I1203 08:12:17.135526 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:12:17 crc kubenswrapper[4612]: I1203 08:12:17.136389 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
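[editor's note] The probe failures above are plain HTTP GETs against http://127.0.0.1:8798/health that fail with "connection refused" because nothing is listening inside the container; any status in the 200-399 range would count as success. A minimal Go sketch of the kind of endpoint the probe expects (illustrative shape only, not the machine-config-daemon's actual server):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK) // any 2xx/3xx is a probe success
		fmt.Fprintln(w, "ok")
	})
	// Listen on the same loopback address:port the kubelet probes.
	if err := http.ListenAndServe("127.0.0.1:8798", nil); err != nil {
		panic(err)
	}
}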
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.677763 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.680494 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.701773 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.762513 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.762580 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.762928 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nsfc\" (UniqueName: \"kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.865060 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.865112 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.865155 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.865403 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.865464 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nsfc\" (UniqueName: \"kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:34 crc kubenswrapper[4612]: I1203 08:12:34.884022 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nsfc\" (UniqueName: \"kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc\") pod \"certified-operators-4skff\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") " pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:35 crc kubenswrapper[4612]: I1203 08:12:35.001802 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:35 crc kubenswrapper[4612]: I1203 08:12:35.566149 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:36 crc kubenswrapper[4612]: I1203 08:12:36.302365 4612 generic.go:334] "Generic (PLEG): container finished" podID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerID="b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1" exitCode=0
Dec 03 08:12:36 crc kubenswrapper[4612]: I1203 08:12:36.302575 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerDied","Data":"b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1"}
Dec 03 08:12:36 crc kubenswrapper[4612]: I1203 08:12:36.302974 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerStarted","Data":"c10911683b716c4d8913346428cda2939d169fbfd4281e878e959f9cecc97b3c"}
Dec 03 08:12:37 crc kubenswrapper[4612]: I1203 08:12:37.312982 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerStarted","Data":"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"}
Dec 03 08:12:39 crc kubenswrapper[4612]: I1203 08:12:39.344527 4612 generic.go:334] "Generic (PLEG): container finished" podID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerID="5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27" exitCode=0
Dec 03 08:12:39 crc kubenswrapper[4612]: I1203 08:12:39.344599 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerDied","Data":"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"}
Dec 03 08:12:40 crc kubenswrapper[4612]: I1203 08:12:40.374706 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerStarted","Data":"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"}
Dec 03 08:12:40 crc kubenswrapper[4612]: I1203 08:12:40.403294 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4skff" podStartSLOduration=2.756338628 podStartE2EDuration="6.403280066s" podCreationTimestamp="2025-12-03 08:12:34 +0000 UTC" firstStartedPulling="2025-12-03 08:12:36.303949793 +0000 UTC m=+2719.477307193" lastFinishedPulling="2025-12-03 08:12:39.950891231 +0000 UTC m=+2723.124248631" observedRunningTime="2025-12-03 08:12:40.402208029 +0000 UTC m=+2723.575565419" watchObservedRunningTime="2025-12-03 08:12:40.403280066 +0000 UTC m=+2723.576637466"
Dec 03 08:12:45 crc kubenswrapper[4612]: I1203 08:12:45.002811 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:45 crc kubenswrapper[4612]: I1203 08:12:45.003431 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:45 crc kubenswrapper[4612]: I1203 08:12:45.056295 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:45 crc kubenswrapper[4612]: I1203 08:12:45.481584 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:45 crc kubenswrapper[4612]: I1203 08:12:45.540243 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.136110 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.136177 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.136226 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2"
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.136916 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.136984 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005" gracePeriod=600
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.434383 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005" exitCode=0
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.435140 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4skff" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="registry-server" containerID="cri-o://36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97" gracePeriod=2
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.434582 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005"}
Dec 03 08:12:47 crc kubenswrapper[4612]: I1203 08:12:47.435305 4612 scope.go:117] "RemoveContainer" containerID="ffb38415c1b921e5bce21c700bda5187597cd8445a4c8f1a2f57d8d2522d2afb"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.022433 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.166804 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content\") pod \"40d7f7e3-d17b-4396-a29e-29fb195890da\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") "
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.167228 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities\") pod \"40d7f7e3-d17b-4396-a29e-29fb195890da\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") "
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.167412 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nsfc\" (UniqueName: \"kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc\") pod \"40d7f7e3-d17b-4396-a29e-29fb195890da\" (UID: \"40d7f7e3-d17b-4396-a29e-29fb195890da\") "
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.169473 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities" (OuterVolumeSpecName: "utilities") pod "40d7f7e3-d17b-4396-a29e-29fb195890da" (UID: "40d7f7e3-d17b-4396-a29e-29fb195890da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.175072 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc" (OuterVolumeSpecName: "kube-api-access-9nsfc") pod "40d7f7e3-d17b-4396-a29e-29fb195890da" (UID: "40d7f7e3-d17b-4396-a29e-29fb195890da"). InnerVolumeSpecName "kube-api-access-9nsfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.205272 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "40d7f7e3-d17b-4396-a29e-29fb195890da" (UID: "40d7f7e3-d17b-4396-a29e-29fb195890da"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.269557 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nsfc\" (UniqueName: \"kubernetes.io/projected/40d7f7e3-d17b-4396-a29e-29fb195890da-kube-api-access-9nsfc\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.269595 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.269604 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40d7f7e3-d17b-4396-a29e-29fb195890da-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.444829 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737"}
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.451297 4612 generic.go:334] "Generic (PLEG): container finished" podID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerID="36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97" exitCode=0
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.451324 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerDied","Data":"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"}
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.451340 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4skff" event={"ID":"40d7f7e3-d17b-4396-a29e-29fb195890da","Type":"ContainerDied","Data":"c10911683b716c4d8913346428cda2939d169fbfd4281e878e959f9cecc97b3c"}
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.451356 4612 scope.go:117] "RemoveContainer" containerID="36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.451436 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4skff"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.503391 4612 scope.go:117] "RemoveContainer" containerID="5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.513994 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.542110 4612 scope.go:117] "RemoveContainer" containerID="b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.556321 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4skff"]
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.590113 4612 scope.go:117] "RemoveContainer" containerID="36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"
Dec 03 08:12:48 crc kubenswrapper[4612]: E1203 08:12:48.593043 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97\": container with ID starting with 36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97 not found: ID does not exist" containerID="36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.593077 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97"} err="failed to get container status \"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97\": rpc error: code = NotFound desc = could not find container \"36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97\": container with ID starting with 36f618bc17e710e6d815775f974d047cf30a94cdbf7fa35f1142a1c759314a97 not found: ID does not exist"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.593098 4612 scope.go:117] "RemoveContainer" containerID="5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"
Dec 03 08:12:48 crc kubenswrapper[4612]: E1203 08:12:48.593372 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27\": container with ID starting with 5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27 not found: ID does not exist" containerID="5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.593446 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27"} err="failed to get container status \"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27\": rpc error: code = NotFound desc = could not find container \"5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27\": container with ID starting with 5d3c03f9272ea7e0948b5a6a4ccc496df829b46fdf1f9c73d30026d0fa6a6f27 not found: ID does not exist"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.593474 4612 scope.go:117] "RemoveContainer" containerID="b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1"
Dec 03 08:12:48 crc kubenswrapper[4612]: E1203 08:12:48.593779 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1\": container with ID starting with b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1 not found: ID does not exist" containerID="b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1"
Dec 03 08:12:48 crc kubenswrapper[4612]: I1203 08:12:48.593835 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1"} err="failed to get container status \"b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1\": rpc error: code = NotFound desc = could not find container \"b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1\": container with ID starting with b22c6051023e7dc35b3dbc043a829bdb37796be5528e4a2b79f62d9c061dada1 not found: ID does not exist"
Dec 03 08:12:49 crc kubenswrapper[4612]: I1203 08:12:49.112704 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" path="/var/lib/kubelet/pods/40d7f7e3-d17b-4396-a29e-29fb195890da/volumes"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.717911 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:12:50 crc kubenswrapper[4612]: E1203 08:12:50.719162 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="extract-utilities"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.719186 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="extract-utilities"
Dec 03 08:12:50 crc kubenswrapper[4612]: E1203 08:12:50.719240 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="registry-server"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.719252 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="registry-server"
Dec 03 08:12:50 crc kubenswrapper[4612]: E1203 08:12:50.719269 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="extract-content"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.719280 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="extract-content"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.719603 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d7f7e3-d17b-4396-a29e-29fb195890da" containerName="registry-server"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.721915 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.740243 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.823320 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.823419 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.823466 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8gjd\" (UniqueName: \"kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.925562 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.925630 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8gjd\" (UniqueName: \"kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.925715 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.926213 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.926284 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:50 crc kubenswrapper[4612]: I1203 08:12:50.955194 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8gjd\" (UniqueName: \"kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd\") pod \"redhat-operators-fzxdc\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") " pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:51 crc kubenswrapper[4612]: I1203 08:12:51.065672 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:12:51 crc kubenswrapper[4612]: I1203 08:12:51.564487 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:12:52 crc kubenswrapper[4612]: I1203 08:12:52.484604 4612 generic.go:334] "Generic (PLEG): container finished" podID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerID="963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581" exitCode=0
Dec 03 08:12:52 crc kubenswrapper[4612]: I1203 08:12:52.484662 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerDied","Data":"963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581"}
Dec 03 08:12:52 crc kubenswrapper[4612]: I1203 08:12:52.485038 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerStarted","Data":"86e18bad25adc95f68c91b03efbf5a71c521df185248131f461f821305267f52"}
Dec 03 08:12:53 crc kubenswrapper[4612]: I1203 08:12:53.505454 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerStarted","Data":"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"}
Dec 03 08:12:56 crc kubenswrapper[4612]: I1203 08:12:56.537032 4612 generic.go:334] "Generic (PLEG): container finished" podID="f9b6eb07-a99d-4365-b819-81f008e2018d" containerID="d70ffbf975100c81e309987b06c612c474f2349cf7648c5c9e5b2d24bfde3752" exitCode=0
Dec 03 08:12:56 crc kubenswrapper[4612]: I1203 08:12:56.537112 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx" event={"ID":"f9b6eb07-a99d-4365-b819-81f008e2018d","Type":"ContainerDied","Data":"d70ffbf975100c81e309987b06c612c474f2349cf7648c5c9e5b2d24bfde3752"}
Dec 03 08:12:57 crc kubenswrapper[4612]: I1203 08:12:57.551554 4612 generic.go:334] "Generic (PLEG): container finished" podID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerID="86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629" exitCode=0
Dec 03 08:12:57 crc kubenswrapper[4612]: I1203 08:12:57.551665 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerDied","Data":"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"}
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.085898 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.198918 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199251 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199347 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199413 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199486 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199552 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199589 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw7mt\" (UniqueName: \"kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199612 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.199648 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle\") pod \"f9b6eb07-a99d-4365-b819-81f008e2018d\" (UID: \"f9b6eb07-a99d-4365-b819-81f008e2018d\") "
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.218442 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt" (OuterVolumeSpecName: "kube-api-access-xw7mt") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "kube-api-access-xw7mt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.229656 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.234423 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.252960 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory" (OuterVolumeSpecName: "inventory") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.253108 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.253478 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.259763 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.270499 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.274231 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f9b6eb07-a99d-4365-b819-81f008e2018d" (UID: "f9b6eb07-a99d-4365-b819-81f008e2018d"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.301879 4612 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302084 4612 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302174 4612 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302230 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302430 4612 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302515 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302568 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw7mt\" (UniqueName: \"kubernetes.io/projected/f9b6eb07-a99d-4365-b819-81f008e2018d-kube-api-access-xw7mt\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302619 4612 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.302678 4612 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b6eb07-a99d-4365-b819-81f008e2018d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.563276 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerStarted","Data":"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"}
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.566918 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx" event={"ID":"f9b6eb07-a99d-4365-b819-81f008e2018d","Type":"ContainerDied","Data":"57adc78234b440e1621e8e55570baf08888b94ca7a7c01d058d98c9060d27baa"}
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.567043 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57adc78234b440e1621e8e55570baf08888b94ca7a7c01d058d98c9060d27baa"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.567149 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sc9jx"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.739640 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fzxdc" podStartSLOduration=3.116030276 podStartE2EDuration="8.739616364s" podCreationTimestamp="2025-12-03 08:12:50 +0000 UTC" firstStartedPulling="2025-12-03 08:12:52.488649993 +0000 UTC m=+2735.662007413" lastFinishedPulling="2025-12-03 08:12:58.112236101 +0000 UTC m=+2741.285593501" observedRunningTime="2025-12-03 08:12:58.612170448 +0000 UTC m=+2741.785527848" watchObservedRunningTime="2025-12-03 08:12:58.739616364 +0000 UTC m=+2741.912973774"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.746110 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"]
Dec 03 08:12:58 crc kubenswrapper[4612]: E1203 08:12:58.746572 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b6eb07-a99d-4365-b819-81f008e2018d" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.746590 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b6eb07-a99d-4365-b819-81f008e2018d" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.746758 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b6eb07-a99d-4365-b819-81f008e2018d" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.747452 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.751779 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.751984 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfchj"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.752173 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.752287 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.756449 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.771554 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"]
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.915532 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.915596 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.915702 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.915895 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.915974 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.916102 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:58 crc kubenswrapper[4612]: I1203 08:12:58.916202 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m79c9\" (UniqueName: \"kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.017870 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.017954 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.017979 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.018034 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.018071 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m79c9\" (UniqueName: \"kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.018095 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.018128 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.022874 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.023249 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.023627 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.032424 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.032781 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.032928 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.039326 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m79c9\" (UniqueName: \"kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4gccm\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.073716 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"
Dec 03 08:12:59 crc kubenswrapper[4612]: I1203 08:12:59.695009 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm"]
Dec 03 08:13:00 crc kubenswrapper[4612]: I1203 08:13:00.587105 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" event={"ID":"063f10ac-9f99-4bae-9eae-ec9d2ebb773f","Type":"ContainerStarted","Data":"944b0e8145471dc921c11f3be8edd547f01d7859f43a033439a91af53d542c09"}
Dec 03 08:13:01 crc kubenswrapper[4612]: I1203 08:13:01.066594 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:01 crc kubenswrapper[4612]: I1203 08:13:01.067960 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:01 crc kubenswrapper[4612]: I1203 08:13:01.596461 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" event={"ID":"063f10ac-9f99-4bae-9eae-ec9d2ebb773f","Type":"ContainerStarted","Data":"dc83d5df9e8a8d551b4ab4b5559dc3a6fa254a30028b506efdd3113491ffbb80"}
Dec 03 08:13:01 crc kubenswrapper[4612]: I1203 08:13:01.618558 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" podStartSLOduration=2.699363092 podStartE2EDuration="3.618535393s" podCreationTimestamp="2025-12-03 08:12:58 +0000 UTC" firstStartedPulling="2025-12-03 08:12:59.703855346 +0000 UTC m=+2742.877212746" lastFinishedPulling="2025-12-03 08:13:00.623027647 +0000 UTC m=+2743.796385047" observedRunningTime="2025-12-03 08:13:01.612379299 +0000 UTC m=+2744.785736729" watchObservedRunningTime="2025-12-03 08:13:01.618535393 +0000 UTC m=+2744.791892803"
Dec 03 08:13:02 crc kubenswrapper[4612]: I1203 08:13:02.110658 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fzxdc" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="registry-server" probeResult="failure" output=<
Dec 03 08:13:02 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s
Dec 03 08:13:02 crc kubenswrapper[4612]: >
Dec 03 08:13:11 crc kubenswrapper[4612]: I1203 08:13:11.150300 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:11 crc kubenswrapper[4612]: I1203 08:13:11.220378 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:11 crc kubenswrapper[4612]: I1203 08:13:11.415779 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:13:12 crc kubenswrapper[4612]: I1203 08:13:12.716699 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fzxdc" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="registry-server" containerID="cri-o://cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a" gracePeriod=2
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.212439 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.302322 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities\") pod \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") "
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.302381 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8gjd\" (UniqueName: \"kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd\") pod \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") "
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.302531 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content\") pod \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\" (UID: \"2a7f3d75-8212-4749-9eaa-f02debef5c3f\") "
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.303688 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities" (OuterVolumeSpecName: "utilities") pod "2a7f3d75-8212-4749-9eaa-f02debef5c3f" (UID: "2a7f3d75-8212-4749-9eaa-f02debef5c3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.307665 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd" (OuterVolumeSpecName: "kube-api-access-g8gjd") pod "2a7f3d75-8212-4749-9eaa-f02debef5c3f" (UID: "2a7f3d75-8212-4749-9eaa-f02debef5c3f"). InnerVolumeSpecName "kube-api-access-g8gjd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.404767 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.404795 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8gjd\" (UniqueName: \"kubernetes.io/projected/2a7f3d75-8212-4749-9eaa-f02debef5c3f-kube-api-access-g8gjd\") on node \"crc\" DevicePath \"\""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.411623 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a7f3d75-8212-4749-9eaa-f02debef5c3f" (UID: "2a7f3d75-8212-4749-9eaa-f02debef5c3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.506408 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7f3d75-8212-4749-9eaa-f02debef5c3f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.732720 4612 generic.go:334] "Generic (PLEG): container finished" podID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerID="cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a" exitCode=0
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.732793 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerDied","Data":"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"}
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.732863 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzxdc" event={"ID":"2a7f3d75-8212-4749-9eaa-f02debef5c3f","Type":"ContainerDied","Data":"86e18bad25adc95f68c91b03efbf5a71c521df185248131f461f821305267f52"}
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.732889 4612 scope.go:117] "RemoveContainer" containerID="cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.732977 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzxdc"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.768430 4612 scope.go:117] "RemoveContainer" containerID="86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.808180 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.820046 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fzxdc"]
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.824636 4612 scope.go:117] "RemoveContainer" containerID="963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.870427 4612 scope.go:117] "RemoveContainer" containerID="cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"
Dec 03 08:13:13 crc kubenswrapper[4612]: E1203 08:13:13.870978 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a\": container with ID starting with cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a not found: ID does not exist" containerID="cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.871359 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a"} err="failed to get container status \"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a\": rpc error: code = NotFound desc = could not find container \"cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a\": container with ID starting with cfc9ccbf48f6439da0022ed9825568a84d9728ecfba24d8de3ebed2c92f91c2a not found: ID does not exist"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.871449 4612 scope.go:117] "RemoveContainer" containerID="86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"
Dec 03 08:13:13 crc kubenswrapper[4612]: E1203 08:13:13.871748 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629\": container with ID starting with 86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629 not found: ID does not exist" containerID="86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.871921 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629"} err="failed to get container status \"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629\": rpc error: code = NotFound desc = could not find container \"86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629\": container with ID starting with 86e209e0ae36845d784d3755db627a675e6f4a9de0857d901361fed00497d629 not found: ID does not exist"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.872002 4612 scope.go:117] "RemoveContainer" containerID="963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581"
Dec 03 08:13:13 crc kubenswrapper[4612]: E1203 08:13:13.873731 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581\": container with ID starting with 963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581 not found: ID does not exist" containerID="963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581"
Dec 03 08:13:13 crc kubenswrapper[4612]: I1203 08:13:13.873825 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581"} err="failed to get container status \"963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581\": rpc error: code = NotFound desc = could not find container \"963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581\": container with ID starting with 963335b83683ebb30709156c6b2ddf95e5dd612f2052410fc9aca142cf45b581 not found: ID does not exist"
Dec 03 08:13:15 crc kubenswrapper[4612]: I1203 08:13:15.101862 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" path="/var/lib/kubelet/pods/2a7f3d75-8212-4749-9eaa-f02debef5c3f/volumes"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.753130 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"]
Dec 03 08:14:20 crc kubenswrapper[4612]: E1203 08:14:20.753854 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="extract-content"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.753865 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="extract-content"
Dec 03 08:14:20 crc kubenswrapper[4612]: E1203 08:14:20.753891 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="extract-utilities"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.753898 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="extract-utilities"
Dec 03 08:14:20 crc kubenswrapper[4612]: E1203 08:14:20.753915 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="registry-server"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.753921 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="registry-server"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.754129 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7f3d75-8212-4749-9eaa-f02debef5c3f" containerName="registry-server"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.755309 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.766475 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"]
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.915175 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.915264 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:20 crc kubenswrapper[4612]: I1203 08:14:20.915466 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wlcm\" (UniqueName: \"kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.017076 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wlcm\" (UniqueName: \"kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.017127 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.017185 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.017563 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.017582 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.035276 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wlcm\" (UniqueName: \"kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm\") pod \"redhat-marketplace-lr8gk\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.071630 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lr8gk"
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.363964 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"]
Dec 03 08:14:21 crc kubenswrapper[4612]: I1203 08:14:21.519909 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerStarted","Data":"dfa2ab6033224114f98b0734fe84079e5da4e74156a84030f23989957573fbcc"}
Dec 03 08:14:22 crc kubenswrapper[4612]: I1203 08:14:22.529604 4612 generic.go:334] "Generic (PLEG): container finished" podID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerID="1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226" exitCode=0
Dec 03 08:14:22 crc kubenswrapper[4612]: I1203 08:14:22.529689 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerDied","Data":"1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226"}
Dec 03 08:14:22 crc kubenswrapper[4612]: I1203 08:14:22.533001 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 08:14:23 crc kubenswrapper[4612]: I1203 08:14:23.540559 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerStarted","Data":"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1"}
Dec 03 08:14:24 crc kubenswrapper[4612]: I1203 08:14:24.554685 4612 generic.go:334] "Generic (PLEG): container finished" podID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerID="f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1" exitCode=0
Dec 03 08:14:24 crc kubenswrapper[4612]: I1203 08:14:24.554731 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerDied","Data":"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1"}
Dec 03 08:14:25 crc kubenswrapper[4612]: I1203 08:14:25.564928 4612 kubelet.go:2453]
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerStarted","Data":"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e"} Dec 03 08:14:25 crc kubenswrapper[4612]: I1203 08:14:25.598620 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lr8gk" podStartSLOduration=3.191219067 podStartE2EDuration="5.59860277s" podCreationTimestamp="2025-12-03 08:14:20 +0000 UTC" firstStartedPulling="2025-12-03 08:14:22.532777355 +0000 UTC m=+2825.706134755" lastFinishedPulling="2025-12-03 08:14:24.940161018 +0000 UTC m=+2828.113518458" observedRunningTime="2025-12-03 08:14:25.59180986 +0000 UTC m=+2828.765167260" watchObservedRunningTime="2025-12-03 08:14:25.59860277 +0000 UTC m=+2828.771960180" Dec 03 08:14:31 crc kubenswrapper[4612]: I1203 08:14:31.072472 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:31 crc kubenswrapper[4612]: I1203 08:14:31.073834 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:31 crc kubenswrapper[4612]: I1203 08:14:31.148692 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:31 crc kubenswrapper[4612]: I1203 08:14:31.691436 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:35 crc kubenswrapper[4612]: I1203 08:14:35.580719 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"] Dec 03 08:14:35 crc kubenswrapper[4612]: I1203 08:14:35.581501 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lr8gk" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="registry-server" containerID="cri-o://c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e" gracePeriod=2 Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.148412 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.288868 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wlcm\" (UniqueName: \"kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm\") pod \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.289006 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities\") pod \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.289044 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content\") pod \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\" (UID: \"6d745878-64a1-43ac-b663-ddca3cfbe5d4\") " Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.291838 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities" (OuterVolumeSpecName: "utilities") pod "6d745878-64a1-43ac-b663-ddca3cfbe5d4" (UID: "6d745878-64a1-43ac-b663-ddca3cfbe5d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.296105 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm" (OuterVolumeSpecName: "kube-api-access-7wlcm") pod "6d745878-64a1-43ac-b663-ddca3cfbe5d4" (UID: "6d745878-64a1-43ac-b663-ddca3cfbe5d4"). InnerVolumeSpecName "kube-api-access-7wlcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.310686 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d745878-64a1-43ac-b663-ddca3cfbe5d4" (UID: "6d745878-64a1-43ac-b663-ddca3cfbe5d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.391841 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wlcm\" (UniqueName: \"kubernetes.io/projected/6d745878-64a1-43ac-b663-ddca3cfbe5d4-kube-api-access-7wlcm\") on node \"crc\" DevicePath \"\"" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.391883 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.391893 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d745878-64a1-43ac-b663-ddca3cfbe5d4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.689352 4612 generic.go:334] "Generic (PLEG): container finished" podID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerID="c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e" exitCode=0 Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.689412 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lr8gk" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.689438 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerDied","Data":"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e"} Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.689837 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lr8gk" event={"ID":"6d745878-64a1-43ac-b663-ddca3cfbe5d4","Type":"ContainerDied","Data":"dfa2ab6033224114f98b0734fe84079e5da4e74156a84030f23989957573fbcc"} Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.689866 4612 scope.go:117] "RemoveContainer" containerID="c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.729994 4612 scope.go:117] "RemoveContainer" containerID="f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.744556 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"] Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.780521 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lr8gk"] Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.792684 4612 scope.go:117] "RemoveContainer" containerID="1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.816415 4612 scope.go:117] "RemoveContainer" containerID="c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e" Dec 03 08:14:36 crc kubenswrapper[4612]: E1203 08:14:36.818439 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e\": container with ID starting with c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e not found: ID does not exist" containerID="c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.818496 4612 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e"} err="failed to get container status \"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e\": rpc error: code = NotFound desc = could not find container \"c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e\": container with ID starting with c65aacef8d5cf9865ee64fdb61587657875481559a9c9e7241c926e627a3de4e not found: ID does not exist" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.818527 4612 scope.go:117] "RemoveContainer" containerID="f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1" Dec 03 08:14:36 crc kubenswrapper[4612]: E1203 08:14:36.818922 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1\": container with ID starting with f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1 not found: ID does not exist" containerID="f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.818963 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1"} err="failed to get container status \"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1\": rpc error: code = NotFound desc = could not find container \"f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1\": container with ID starting with f8aa2c4efd8b729d0efbd775c62ecd9ab377a11a952e8e0e6ce8c482bec421d1 not found: ID does not exist" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.818981 4612 scope.go:117] "RemoveContainer" containerID="1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226" Dec 03 08:14:36 crc kubenswrapper[4612]: E1203 08:14:36.819194 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226\": container with ID starting with 1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226 not found: ID does not exist" containerID="1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226" Dec 03 08:14:36 crc kubenswrapper[4612]: I1203 08:14:36.819220 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226"} err="failed to get container status \"1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226\": rpc error: code = NotFound desc = could not find container \"1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226\": container with ID starting with 1ae2827731b008b0da18cf12cb49876324db3b5ea7306ab2a23fe3d85ec40226 not found: ID does not exist" Dec 03 08:14:37 crc kubenswrapper[4612]: I1203 08:14:37.102937 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" path="/var/lib/kubelet/pods/6d745878-64a1-43ac-b663-ddca3cfbe5d4/volumes" Dec 03 08:14:47 crc kubenswrapper[4612]: I1203 08:14:47.136264 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:14:47 crc kubenswrapper[4612]: I1203 08:14:47.136911 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:14:58 crc kubenswrapper[4612]: E1203 08:14:58.689146 4612 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.6s" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.163594 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88"] Dec 03 08:15:00 crc kubenswrapper[4612]: E1203 08:15:00.164250 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="extract-utilities" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.164265 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="extract-utilities" Dec 03 08:15:00 crc kubenswrapper[4612]: E1203 08:15:00.164292 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="registry-server" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.164301 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="registry-server" Dec 03 08:15:00 crc kubenswrapper[4612]: E1203 08:15:00.164315 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="extract-content" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.164324 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="extract-content" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.164558 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d745878-64a1-43ac-b663-ddca3cfbe5d4" containerName="registry-server" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.165226 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.173031 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88"] Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.189239 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.189438 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.216525 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.216813 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.217098 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwbfq\" (UniqueName: \"kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.318533 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.318566 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.318656 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwbfq\" (UniqueName: \"kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.319633 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume\") pod 
\"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.325587 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.338788 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwbfq\" (UniqueName: \"kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq\") pod \"collect-profiles-29412495-9mw88\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:00 crc kubenswrapper[4612]: I1203 08:15:00.510254 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:01 crc kubenswrapper[4612]: I1203 08:15:01.031590 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88"] Dec 03 08:15:01 crc kubenswrapper[4612]: I1203 08:15:01.730556 4612 generic.go:334] "Generic (PLEG): container finished" podID="da913b5a-6c8b-4e59-8797-9879edf1b1c9" containerID="d69efba6d4ede12777ad2fcad55fac5458849a0e5ec5572bbf65d3284f62aa4b" exitCode=0 Dec 03 08:15:01 crc kubenswrapper[4612]: I1203 08:15:01.730602 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" event={"ID":"da913b5a-6c8b-4e59-8797-9879edf1b1c9","Type":"ContainerDied","Data":"d69efba6d4ede12777ad2fcad55fac5458849a0e5ec5572bbf65d3284f62aa4b"} Dec 03 08:15:01 crc kubenswrapper[4612]: I1203 08:15:01.730627 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" event={"ID":"da913b5a-6c8b-4e59-8797-9879edf1b1c9","Type":"ContainerStarted","Data":"00ad22141daf44239ae380d09a5e003f711306048cdd64bfd7ec35023ee8957e"} Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.186272 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.286457 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwbfq\" (UniqueName: \"kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq\") pod \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.286513 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume\") pod \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.286703 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume\") pod \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\" (UID: \"da913b5a-6c8b-4e59-8797-9879edf1b1c9\") " Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.288373 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume" (OuterVolumeSpecName: "config-volume") pod "da913b5a-6c8b-4e59-8797-9879edf1b1c9" (UID: "da913b5a-6c8b-4e59-8797-9879edf1b1c9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.293824 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq" (OuterVolumeSpecName: "kube-api-access-nwbfq") pod "da913b5a-6c8b-4e59-8797-9879edf1b1c9" (UID: "da913b5a-6c8b-4e59-8797-9879edf1b1c9"). InnerVolumeSpecName "kube-api-access-nwbfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.296983 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "da913b5a-6c8b-4e59-8797-9879edf1b1c9" (UID: "da913b5a-6c8b-4e59-8797-9879edf1b1c9"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.389046 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da913b5a-6c8b-4e59-8797-9879edf1b1c9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.389113 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwbfq\" (UniqueName: \"kubernetes.io/projected/da913b5a-6c8b-4e59-8797-9879edf1b1c9-kube-api-access-nwbfq\") on node \"crc\" DevicePath \"\"" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.389134 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da913b5a-6c8b-4e59-8797-9879edf1b1c9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.756220 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" event={"ID":"da913b5a-6c8b-4e59-8797-9879edf1b1c9","Type":"ContainerDied","Data":"00ad22141daf44239ae380d09a5e003f711306048cdd64bfd7ec35023ee8957e"} Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.756656 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00ad22141daf44239ae380d09a5e003f711306048cdd64bfd7ec35023ee8957e" Dec 03 08:15:03 crc kubenswrapper[4612]: I1203 08:15:03.756304 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412495-9mw88" Dec 03 08:15:04 crc kubenswrapper[4612]: I1203 08:15:04.268017 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn"] Dec 03 08:15:04 crc kubenswrapper[4612]: I1203 08:15:04.275379 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412450-cvvtn"] Dec 03 08:15:05 crc kubenswrapper[4612]: I1203 08:15:05.110802 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e" path="/var/lib/kubelet/pods/fdbdb7ec-06cc-4af0-9f15-aec5780e2b6e/volumes" Dec 03 08:15:17 crc kubenswrapper[4612]: I1203 08:15:17.135804 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:15:17 crc kubenswrapper[4612]: I1203 08:15:17.136375 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:15:45 crc kubenswrapper[4612]: I1203 08:15:45.871189 4612 scope.go:117] "RemoveContainer" containerID="2c91aa771a3949b0c87220ea3a6edee18105485f517ee72bdbf9102b19114cee" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.136228 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.136329 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.136401 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.137686 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.138145 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" gracePeriod=600 Dec 03 08:15:47 crc kubenswrapper[4612]: E1203 08:15:47.280776 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.293353 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" exitCode=0 Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.293411 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737"} Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.293470 4612 scope.go:117] "RemoveContainer" containerID="99b9e21e3abbbf3bfcf8122b2afc3799a681a2e4bb8c620f9b0f27912b007005" Dec 03 08:15:47 crc kubenswrapper[4612]: I1203 08:15:47.294187 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:15:47 crc kubenswrapper[4612]: E1203 08:15:47.294632 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:16:00 crc kubenswrapper[4612]: I1203 08:16:00.090580 4612 scope.go:117] "RemoveContainer" 
containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:16:00 crc kubenswrapper[4612]: E1203 08:16:00.091776 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:16:11 crc kubenswrapper[4612]: I1203 08:16:11.090863 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:16:11 crc kubenswrapper[4612]: E1203 08:16:11.092182 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:16:24 crc kubenswrapper[4612]: I1203 08:16:24.089378 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:16:24 crc kubenswrapper[4612]: E1203 08:16:24.092560 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:16:38 crc kubenswrapper[4612]: I1203 08:16:38.089891 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:16:38 crc kubenswrapper[4612]: E1203 08:16:38.091014 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:16:48 crc kubenswrapper[4612]: I1203 08:16:48.982741 4612 generic.go:334] "Generic (PLEG): container finished" podID="063f10ac-9f99-4bae-9eae-ec9d2ebb773f" containerID="dc83d5df9e8a8d551b4ab4b5559dc3a6fa254a30028b506efdd3113491ffbb80" exitCode=0 Dec 03 08:16:48 crc kubenswrapper[4612]: I1203 08:16:48.982831 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" event={"ID":"063f10ac-9f99-4bae-9eae-ec9d2ebb773f","Type":"ContainerDied","Data":"dc83d5df9e8a8d551b4ab4b5559dc3a6fa254a30028b506efdd3113491ffbb80"} Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.462323 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.563476 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.563684 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m79c9\" (UniqueName: \"kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.564574 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.564618 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.564727 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.564799 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.564849 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2\") pod \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\" (UID: \"063f10ac-9f99-4bae-9eae-ec9d2ebb773f\") " Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.569499 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.579015 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9" (OuterVolumeSpecName: "kube-api-access-m79c9") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). 
InnerVolumeSpecName "kube-api-access-m79c9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.591534 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory" (OuterVolumeSpecName: "inventory") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.598112 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.603015 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.624209 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.631170 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "063f10ac-9f99-4bae-9eae-ec9d2ebb773f" (UID: "063f10ac-9f99-4bae-9eae-ec9d2ebb773f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667528 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m79c9\" (UniqueName: \"kubernetes.io/projected/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-kube-api-access-m79c9\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667554 4612 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667567 4612 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667576 4612 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667587 4612 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667596 4612 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:50 crc kubenswrapper[4612]: I1203 08:16:50.667605 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/063f10ac-9f99-4bae-9eae-ec9d2ebb773f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:16:51 crc kubenswrapper[4612]: I1203 08:16:51.007517 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" event={"ID":"063f10ac-9f99-4bae-9eae-ec9d2ebb773f","Type":"ContainerDied","Data":"944b0e8145471dc921c11f3be8edd547f01d7859f43a033439a91af53d542c09"} Dec 03 08:16:51 crc kubenswrapper[4612]: I1203 08:16:51.007558 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="944b0e8145471dc921c11f3be8edd547f01d7859f43a033439a91af53d542c09" Dec 03 08:16:51 crc kubenswrapper[4612]: I1203 08:16:51.007631 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4gccm" Dec 03 08:16:52 crc kubenswrapper[4612]: I1203 08:16:52.090240 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:16:52 crc kubenswrapper[4612]: E1203 08:16:52.090669 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:17:05 crc kubenswrapper[4612]: I1203 08:17:05.090474 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:17:05 crc kubenswrapper[4612]: E1203 08:17:05.091969 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:17:20 crc kubenswrapper[4612]: I1203 08:17:20.090918 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:17:20 crc kubenswrapper[4612]: E1203 08:17:20.091686 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:17:32 crc kubenswrapper[4612]: I1203 08:17:32.091202 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:17:32 crc kubenswrapper[4612]: E1203 08:17:32.092234 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.434559 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 08:17:36 crc kubenswrapper[4612]: E1203 08:17:36.435448 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="063f10ac-9f99-4bae-9eae-ec9d2ebb773f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.435466 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="063f10ac-9f99-4bae-9eae-ec9d2ebb773f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 08:17:36 crc kubenswrapper[4612]: E1203 08:17:36.435523 4612 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="da913b5a-6c8b-4e59-8797-9879edf1b1c9" containerName="collect-profiles" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.435532 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="da913b5a-6c8b-4e59-8797-9879edf1b1c9" containerName="collect-profiles" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.435775 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="063f10ac-9f99-4bae-9eae-ec9d2ebb773f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.435797 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="da913b5a-6c8b-4e59-8797-9879edf1b1c9" containerName="collect-profiles" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.436501 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.442069 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.442268 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-6bczp" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.442347 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.442481 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.463467 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.512632 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.512732 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.512812 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.512864 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.513155 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.513444 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.513537 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.513639 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.513745 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l54s9\" (UniqueName: \"kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615297 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615374 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615445 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615480 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615527 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615603 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615635 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615683 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.615705 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l54s9\" (UniqueName: \"kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.616432 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.616446 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.616700 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.617685 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.620153 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: 
\"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.623734 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.624404 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.633851 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.648740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l54s9\" (UniqueName: \"kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.666338 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " pod="openstack/tempest-tests-tempest" Dec 03 08:17:36 crc kubenswrapper[4612]: I1203 08:17:36.776138 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 08:17:37 crc kubenswrapper[4612]: I1203 08:17:37.300803 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 08:17:37 crc kubenswrapper[4612]: W1203 08:17:37.306183 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cc1684d_023d_46c3_8f87_3e91941a34e9.slice/crio-0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe WatchSource:0}: Error finding container 0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe: Status 404 returned error can't find the container with id 0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe Dec 03 08:17:37 crc kubenswrapper[4612]: I1203 08:17:37.496807 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cc1684d-023d-46c3-8f87-3e91941a34e9","Type":"ContainerStarted","Data":"0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe"} Dec 03 08:17:45 crc kubenswrapper[4612]: I1203 08:17:45.090226 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:17:45 crc kubenswrapper[4612]: E1203 08:17:45.092572 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:17:57 crc kubenswrapper[4612]: I1203 08:17:57.095550 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:17:57 crc kubenswrapper[4612]: E1203 08:17:57.096434 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:18:12 crc kubenswrapper[4612]: I1203 08:18:12.090837 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:18:12 crc kubenswrapper[4612]: E1203 08:18:12.091776 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:18:15 crc kubenswrapper[4612]: E1203 08:18:15.857560 4612 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 03 08:18:15 crc kubenswrapper[4612]: E1203 08:18:15.859264 4612 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l54s9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(5cc1684d-023d-46c3-8f87-3e91941a34e9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 08:18:15 crc kubenswrapper[4612]: E1203 08:18:15.860534 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="5cc1684d-023d-46c3-8f87-3e91941a34e9" Dec 03 08:18:15 crc kubenswrapper[4612]: E1203 08:18:15.927202 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="5cc1684d-023d-46c3-8f87-3e91941a34e9" Dec 03 08:18:27 crc kubenswrapper[4612]: I1203 08:18:27.106332 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:18:27 crc kubenswrapper[4612]: E1203 08:18:27.108105 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:18:28 crc kubenswrapper[4612]: I1203 08:18:28.555704 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 08:18:30 crc kubenswrapper[4612]: I1203 08:18:30.073933 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cc1684d-023d-46c3-8f87-3e91941a34e9","Type":"ContainerStarted","Data":"fdf713b8e5bbaeb55cb8675dd27ef780b8a12f7bafc10ab74655326163053706"} Dec 03 08:18:30 crc kubenswrapper[4612]: I1203 08:18:30.107473 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.864914405 podStartE2EDuration="55.107453515s" podCreationTimestamp="2025-12-03 08:17:35 +0000 UTC" firstStartedPulling="2025-12-03 08:17:37.3076463 +0000 UTC m=+3020.481003700" lastFinishedPulling="2025-12-03 08:18:28.55018538 +0000 UTC m=+3071.723542810" observedRunningTime="2025-12-03 08:18:30.100537388 +0000 UTC m=+3073.273894818" watchObservedRunningTime="2025-12-03 08:18:30.107453515 +0000 UTC m=+3073.280810905" Dec 03 08:18:38 crc kubenswrapper[4612]: I1203 08:18:38.089202 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:18:38 crc kubenswrapper[4612]: E1203 08:18:38.090832 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:18:52 crc kubenswrapper[4612]: I1203 08:18:52.089545 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:18:52 crc kubenswrapper[4612]: E1203 08:18:52.091512 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" 
podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:19:03 crc kubenswrapper[4612]: I1203 08:19:03.098682 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:19:03 crc kubenswrapper[4612]: E1203 08:19:03.099294 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:19:15 crc kubenswrapper[4612]: I1203 08:19:15.091893 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:19:15 crc kubenswrapper[4612]: E1203 08:19:15.094762 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:19:27 crc kubenswrapper[4612]: I1203 08:19:27.104889 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:19:27 crc kubenswrapper[4612]: E1203 08:19:27.105968 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:19:39 crc kubenswrapper[4612]: I1203 08:19:39.092640 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:19:39 crc kubenswrapper[4612]: E1203 08:19:39.093372 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:19:54 crc kubenswrapper[4612]: I1203 08:19:54.090662 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:19:54 crc kubenswrapper[4612]: E1203 08:19:54.109527 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:20:08 crc kubenswrapper[4612]: I1203 08:20:08.089740 4612 scope.go:117] "RemoveContainer" 
containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:20:08 crc kubenswrapper[4612]: E1203 08:20:08.091163 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:20:22 crc kubenswrapper[4612]: I1203 08:20:22.090155 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:20:22 crc kubenswrapper[4612]: E1203 08:20:22.090805 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:20:26 crc kubenswrapper[4612]: I1203 08:20:26.922297 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:26 crc kubenswrapper[4612]: I1203 08:20:26.924459 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:26 crc kubenswrapper[4612]: I1203 08:20:26.983133 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.047421 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngkmx\" (UniqueName: \"kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.047593 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.047681 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.149918 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.150227 4612 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.150262 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngkmx\" (UniqueName: \"kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.150695 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.150793 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.174088 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngkmx\" (UniqueName: \"kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx\") pod \"community-operators-xggpj\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:27 crc kubenswrapper[4612]: I1203 08:20:27.241237 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:28 crc kubenswrapper[4612]: I1203 08:20:28.036174 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:28 crc kubenswrapper[4612]: I1203 08:20:28.203406 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerStarted","Data":"a6ec724f86b5a40990793002aafc91daddc9e4e0b67d977b2685b0104804058b"} Dec 03 08:20:29 crc kubenswrapper[4612]: I1203 08:20:29.213070 4612 generic.go:334] "Generic (PLEG): container finished" podID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerID="6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7" exitCode=0 Dec 03 08:20:29 crc kubenswrapper[4612]: I1203 08:20:29.213211 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerDied","Data":"6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7"} Dec 03 08:20:29 crc kubenswrapper[4612]: I1203 08:20:29.217999 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:20:30 crc kubenswrapper[4612]: I1203 08:20:30.224056 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerStarted","Data":"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3"} Dec 03 08:20:31 crc kubenswrapper[4612]: I1203 08:20:31.236097 4612 generic.go:334] "Generic (PLEG): container finished" podID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerID="3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3" exitCode=0 Dec 03 08:20:31 crc kubenswrapper[4612]: I1203 08:20:31.236144 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerDied","Data":"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3"} Dec 03 08:20:32 crc kubenswrapper[4612]: I1203 08:20:32.266321 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerStarted","Data":"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656"} Dec 03 08:20:32 crc kubenswrapper[4612]: I1203 08:20:32.291322 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xggpj" podStartSLOduration=3.804490107 podStartE2EDuration="6.291303802s" podCreationTimestamp="2025-12-03 08:20:26 +0000 UTC" firstStartedPulling="2025-12-03 08:20:29.216354464 +0000 UTC m=+3192.389711864" lastFinishedPulling="2025-12-03 08:20:31.703168159 +0000 UTC m=+3194.876525559" observedRunningTime="2025-12-03 08:20:32.283208308 +0000 UTC m=+3195.456565738" watchObservedRunningTime="2025-12-03 08:20:32.291303802 +0000 UTC m=+3195.464661202" Dec 03 08:20:36 crc kubenswrapper[4612]: I1203 08:20:36.090240 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:20:36 crc kubenswrapper[4612]: E1203 08:20:36.090869 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:20:37 crc kubenswrapper[4612]: I1203 08:20:37.241816 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:37 crc kubenswrapper[4612]: I1203 08:20:37.242253 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:37 crc kubenswrapper[4612]: I1203 08:20:37.346567 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:37 crc kubenswrapper[4612]: I1203 08:20:37.432537 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:37 crc kubenswrapper[4612]: I1203 08:20:37.597343 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:39 crc kubenswrapper[4612]: I1203 08:20:39.359653 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xggpj" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="registry-server" containerID="cri-o://20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656" gracePeriod=2 Dec 03 08:20:39 crc kubenswrapper[4612]: I1203 08:20:39.945970 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:39 crc kubenswrapper[4612]: I1203 08:20:39.999868 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content\") pod \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " Dec 03 08:20:39 crc kubenswrapper[4612]: I1203 08:20:39.999989 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngkmx\" (UniqueName: \"kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx\") pod \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.000058 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities\") pod \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\" (UID: \"6638cc33-5d94-4b7b-8a10-3ce552a39c20\") " Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.001033 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities" (OuterVolumeSpecName: "utilities") pod "6638cc33-5d94-4b7b-8a10-3ce552a39c20" (UID: "6638cc33-5d94-4b7b-8a10-3ce552a39c20"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.011185 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx" (OuterVolumeSpecName: "kube-api-access-ngkmx") pod "6638cc33-5d94-4b7b-8a10-3ce552a39c20" (UID: "6638cc33-5d94-4b7b-8a10-3ce552a39c20"). InnerVolumeSpecName "kube-api-access-ngkmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.060759 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6638cc33-5d94-4b7b-8a10-3ce552a39c20" (UID: "6638cc33-5d94-4b7b-8a10-3ce552a39c20"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.102201 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.102226 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngkmx\" (UniqueName: \"kubernetes.io/projected/6638cc33-5d94-4b7b-8a10-3ce552a39c20-kube-api-access-ngkmx\") on node \"crc\" DevicePath \"\"" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.102236 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6638cc33-5d94-4b7b-8a10-3ce552a39c20-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.371654 4612 generic.go:334] "Generic (PLEG): container finished" podID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerID="20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656" exitCode=0 Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.371699 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerDied","Data":"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656"} Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.371732 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xggpj" event={"ID":"6638cc33-5d94-4b7b-8a10-3ce552a39c20","Type":"ContainerDied","Data":"a6ec724f86b5a40990793002aafc91daddc9e4e0b67d977b2685b0104804058b"} Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.371756 4612 scope.go:117] "RemoveContainer" containerID="20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.371760 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xggpj" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.417299 4612 scope.go:117] "RemoveContainer" containerID="3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.452804 4612 scope.go:117] "RemoveContainer" containerID="6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.463486 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.480137 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xggpj"] Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.502918 4612 scope.go:117] "RemoveContainer" containerID="20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656" Dec 03 08:20:40 crc kubenswrapper[4612]: E1203 08:20:40.503795 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656\": container with ID starting with 20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656 not found: ID does not exist" containerID="20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.503837 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656"} err="failed to get container status \"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656\": rpc error: code = NotFound desc = could not find container \"20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656\": container with ID starting with 20bc9aa90c19503b42dc3e7a5db158070688e868bffe846febec3957a307f656 not found: ID does not exist" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.503863 4612 scope.go:117] "RemoveContainer" containerID="3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3" Dec 03 08:20:40 crc kubenswrapper[4612]: E1203 08:20:40.504255 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3\": container with ID starting with 3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3 not found: ID does not exist" containerID="3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.504288 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3"} err="failed to get container status \"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3\": rpc error: code = NotFound desc = could not find container \"3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3\": container with ID starting with 3d26058fe8a97b0a93771d09a7739bf7d786dc83d545bf7f068cdaabc8329ae3 not found: ID does not exist" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.504318 4612 scope.go:117] "RemoveContainer" containerID="6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7" Dec 03 08:20:40 crc kubenswrapper[4612]: E1203 08:20:40.504535 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7\": container with ID starting with 6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7 not found: ID does not exist" containerID="6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7" Dec 03 08:20:40 crc kubenswrapper[4612]: I1203 08:20:40.504558 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7"} err="failed to get container status \"6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7\": rpc error: code = NotFound desc = could not find container \"6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7\": container with ID starting with 6dd76dee692cfb2aa3dcb7a1d80a4b42f7cc68156697dbbdaf8ff62dcebf1cb7 not found: ID does not exist" Dec 03 08:20:41 crc kubenswrapper[4612]: I1203 08:20:41.103408 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" path="/var/lib/kubelet/pods/6638cc33-5d94-4b7b-8a10-3ce552a39c20/volumes" Dec 03 08:20:47 crc kubenswrapper[4612]: I1203 08:20:47.091124 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:20:47 crc kubenswrapper[4612]: E1203 08:20:47.091836 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:20:59 crc kubenswrapper[4612]: I1203 08:20:59.091344 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:20:59 crc kubenswrapper[4612]: I1203 08:20:59.751140 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672"} Dec 03 08:23:17 crc kubenswrapper[4612]: I1203 08:23:17.136433 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:23:17 crc kubenswrapper[4612]: I1203 08:23:17.137067 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.697430 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:24 crc kubenswrapper[4612]: E1203 08:23:24.698670 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="registry-server" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 
08:23:24.698693 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="registry-server" Dec 03 08:23:24 crc kubenswrapper[4612]: E1203 08:23:24.698736 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="extract-utilities" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.698746 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="extract-utilities" Dec 03 08:23:24 crc kubenswrapper[4612]: E1203 08:23:24.698755 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="extract-content" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.698764 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="extract-content" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.699025 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="6638cc33-5d94-4b7b-8a10-3ce552a39c20" containerName="registry-server" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.700764 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.707185 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.791354 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.791433 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x872k\" (UniqueName: \"kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.791589 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.893507 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.893579 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x872k\" (UniqueName: \"kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " 
pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.893631 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.894030 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.894050 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:24 crc kubenswrapper[4612]: I1203 08:23:24.920674 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x872k\" (UniqueName: \"kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k\") pod \"certified-operators-l2z9d\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:25 crc kubenswrapper[4612]: I1203 08:23:25.023257 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:25 crc kubenswrapper[4612]: I1203 08:23:25.520784 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:26 crc kubenswrapper[4612]: I1203 08:23:26.165784 4612 generic.go:334] "Generic (PLEG): container finished" podID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerID="7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b" exitCode=0 Dec 03 08:23:26 crc kubenswrapper[4612]: I1203 08:23:26.165834 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerDied","Data":"7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b"} Dec 03 08:23:26 crc kubenswrapper[4612]: I1203 08:23:26.166783 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerStarted","Data":"bdeb23310b24fe492d508934e69896cc98a18cf85967c850676ea17ab5b28a2d"} Dec 03 08:23:27 crc kubenswrapper[4612]: I1203 08:23:27.176688 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerStarted","Data":"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9"} Dec 03 08:23:29 crc kubenswrapper[4612]: I1203 08:23:29.199889 4612 generic.go:334] "Generic (PLEG): container finished" podID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerID="ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9" exitCode=0 Dec 03 08:23:29 crc kubenswrapper[4612]: I1203 08:23:29.200074 4612 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerDied","Data":"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9"} Dec 03 08:23:30 crc kubenswrapper[4612]: I1203 08:23:30.210266 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerStarted","Data":"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598"} Dec 03 08:23:30 crc kubenswrapper[4612]: I1203 08:23:30.235045 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l2z9d" podStartSLOduration=2.670686724 podStartE2EDuration="6.235028949s" podCreationTimestamp="2025-12-03 08:23:24 +0000 UTC" firstStartedPulling="2025-12-03 08:23:26.168218777 +0000 UTC m=+3369.341576177" lastFinishedPulling="2025-12-03 08:23:29.732560992 +0000 UTC m=+3372.905918402" observedRunningTime="2025-12-03 08:23:30.23385606 +0000 UTC m=+3373.407213460" watchObservedRunningTime="2025-12-03 08:23:30.235028949 +0000 UTC m=+3373.408386349" Dec 03 08:23:35 crc kubenswrapper[4612]: I1203 08:23:35.024544 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:35 crc kubenswrapper[4612]: I1203 08:23:35.025079 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:35 crc kubenswrapper[4612]: I1203 08:23:35.110739 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:35 crc kubenswrapper[4612]: I1203 08:23:35.318885 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:35 crc kubenswrapper[4612]: I1203 08:23:35.371582 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:37 crc kubenswrapper[4612]: I1203 08:23:37.272480 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l2z9d" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="registry-server" containerID="cri-o://ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598" gracePeriod=2 Dec 03 08:23:37 crc kubenswrapper[4612]: I1203 08:23:37.982244 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.136936 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content\") pod \"f8d08fb2-f71a-4982-b717-24a1d285168c\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.137442 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x872k\" (UniqueName: \"kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k\") pod \"f8d08fb2-f71a-4982-b717-24a1d285168c\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.137631 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities\") pod \"f8d08fb2-f71a-4982-b717-24a1d285168c\" (UID: \"f8d08fb2-f71a-4982-b717-24a1d285168c\") " Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.138315 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities" (OuterVolumeSpecName: "utilities") pod "f8d08fb2-f71a-4982-b717-24a1d285168c" (UID: "f8d08fb2-f71a-4982-b717-24a1d285168c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.147606 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k" (OuterVolumeSpecName: "kube-api-access-x872k") pod "f8d08fb2-f71a-4982-b717-24a1d285168c" (UID: "f8d08fb2-f71a-4982-b717-24a1d285168c"). InnerVolumeSpecName "kube-api-access-x872k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.191103 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8d08fb2-f71a-4982-b717-24a1d285168c" (UID: "f8d08fb2-f71a-4982-b717-24a1d285168c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.240083 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.240344 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8d08fb2-f71a-4982-b717-24a1d285168c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.240433 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x872k\" (UniqueName: \"kubernetes.io/projected/f8d08fb2-f71a-4982-b717-24a1d285168c-kube-api-access-x872k\") on node \"crc\" DevicePath \"\"" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.283617 4612 generic.go:334] "Generic (PLEG): container finished" podID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerID="ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598" exitCode=0 Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.283657 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerDied","Data":"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598"} Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.283686 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2z9d" event={"ID":"f8d08fb2-f71a-4982-b717-24a1d285168c","Type":"ContainerDied","Data":"bdeb23310b24fe492d508934e69896cc98a18cf85967c850676ea17ab5b28a2d"} Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.283703 4612 scope.go:117] "RemoveContainer" containerID="ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.283813 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l2z9d" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.319839 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.325044 4612 scope.go:117] "RemoveContainer" containerID="ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.329091 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l2z9d"] Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.356472 4612 scope.go:117] "RemoveContainer" containerID="7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.398324 4612 scope.go:117] "RemoveContainer" containerID="ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598" Dec 03 08:23:38 crc kubenswrapper[4612]: E1203 08:23:38.399381 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598\": container with ID starting with ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598 not found: ID does not exist" containerID="ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.399422 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598"} err="failed to get container status \"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598\": rpc error: code = NotFound desc = could not find container \"ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598\": container with ID starting with ce1aaea419fcb57f64eed07351a26b8aef559406adfa462f4cbc6f89c013f598 not found: ID does not exist" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.399451 4612 scope.go:117] "RemoveContainer" containerID="ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9" Dec 03 08:23:38 crc kubenswrapper[4612]: E1203 08:23:38.399716 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9\": container with ID starting with ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9 not found: ID does not exist" containerID="ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.399737 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9"} err="failed to get container status \"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9\": rpc error: code = NotFound desc = could not find container \"ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9\": container with ID starting with ddec775bd9e3e63eaaeeda2bc67d19d9780bc87d141c30dc0ddb398828509ca9 not found: ID does not exist" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.399748 4612 scope.go:117] "RemoveContainer" containerID="7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b" Dec 03 08:23:38 crc kubenswrapper[4612]: E1203 08:23:38.400582 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b\": container with ID starting with 7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b not found: ID does not exist" containerID="7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b" Dec 03 08:23:38 crc kubenswrapper[4612]: I1203 08:23:38.400610 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b"} err="failed to get container status \"7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b\": rpc error: code = NotFound desc = could not find container \"7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b\": container with ID starting with 7ba43b7964d1a4ef16522e47c98302a34c23468019ed6e1365a72be4f598723b not found: ID does not exist" Dec 03 08:23:39 crc kubenswrapper[4612]: I1203 08:23:39.102646 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" path="/var/lib/kubelet/pods/f8d08fb2-f71a-4982-b717-24a1d285168c/volumes" Dec 03 08:23:47 crc kubenswrapper[4612]: I1203 08:23:47.135683 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:23:47 crc kubenswrapper[4612]: I1203 08:23:47.136130 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.135025 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:23:50 crc kubenswrapper[4612]: E1203 08:23:50.135992 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="extract-utilities" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.136008 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="extract-utilities" Dec 03 08:23:50 crc kubenswrapper[4612]: E1203 08:23:50.136037 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="extract-content" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.136043 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="extract-content" Dec 03 08:23:50 crc kubenswrapper[4612]: E1203 08:23:50.136058 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="registry-server" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.136064 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="registry-server" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.136250 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8d08fb2-f71a-4982-b717-24a1d285168c" containerName="registry-server" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 
08:23:50.137632 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.154985 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.196108 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t26pj\" (UniqueName: \"kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.196389 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.196514 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.299003 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.299425 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.299695 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t26pj\" (UniqueName: \"kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.299816 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.300334 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.321422 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t26pj\" (UniqueName: \"kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj\") pod \"redhat-operators-7tqmd\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.465687 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:23:50 crc kubenswrapper[4612]: I1203 08:23:50.965142 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:23:51 crc kubenswrapper[4612]: I1203 08:23:51.407375 4612 generic.go:334] "Generic (PLEG): container finished" podID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerID="55ba1b97277eebe07441545ac553f5eb3c7ef2164993c795cef9799e395a6f09" exitCode=0 Dec 03 08:23:51 crc kubenswrapper[4612]: I1203 08:23:51.407457 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerDied","Data":"55ba1b97277eebe07441545ac553f5eb3c7ef2164993c795cef9799e395a6f09"} Dec 03 08:23:51 crc kubenswrapper[4612]: I1203 08:23:51.407599 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerStarted","Data":"1bc41bf04fd5c97891f909a9ee175357c4ffae8f5ceda9f2ffc6b4648ad68177"} Dec 03 08:23:52 crc kubenswrapper[4612]: I1203 08:23:52.418809 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerStarted","Data":"4d0ead59e0aea272b740ff3203e777cc6a547f2c0b733fc002eadb3a0c7d2d63"} Dec 03 08:23:57 crc kubenswrapper[4612]: I1203 08:23:57.466425 4612 generic.go:334] "Generic (PLEG): container finished" podID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerID="4d0ead59e0aea272b740ff3203e777cc6a547f2c0b733fc002eadb3a0c7d2d63" exitCode=0 Dec 03 08:23:57 crc kubenswrapper[4612]: I1203 08:23:57.466500 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerDied","Data":"4d0ead59e0aea272b740ff3203e777cc6a547f2c0b733fc002eadb3a0c7d2d63"} Dec 03 08:23:59 crc kubenswrapper[4612]: I1203 08:23:59.487051 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerStarted","Data":"5dc17664cf68cf750f08205e1d92cfc708ece2a2ec92e69366c31818b9c8f81b"} Dec 03 08:23:59 crc kubenswrapper[4612]: I1203 08:23:59.516893 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7tqmd" podStartSLOduration=1.891412097 podStartE2EDuration="9.516863532s" podCreationTimestamp="2025-12-03 08:23:50 +0000 UTC" firstStartedPulling="2025-12-03 08:23:51.41006271 +0000 UTC m=+3394.583420110" lastFinishedPulling="2025-12-03 08:23:59.035514145 +0000 UTC m=+3402.208871545" observedRunningTime="2025-12-03 08:23:59.505543648 +0000 UTC m=+3402.678901048" watchObservedRunningTime="2025-12-03 08:23:59.516863532 +0000 UTC m=+3402.690220932" Dec 03 08:24:00 crc kubenswrapper[4612]: I1203 08:24:00.465967 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:00 crc kubenswrapper[4612]: I1203 08:24:00.466470 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:01 crc kubenswrapper[4612]: I1203 08:24:01.525828 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7tqmd" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="registry-server" probeResult="failure" output=< Dec 03 08:24:01 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 08:24:01 crc kubenswrapper[4612]: > Dec 03 08:24:10 crc kubenswrapper[4612]: I1203 08:24:10.520707 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:10 crc kubenswrapper[4612]: I1203 08:24:10.568788 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:10 crc kubenswrapper[4612]: I1203 08:24:10.762387 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:24:11 crc kubenswrapper[4612]: I1203 08:24:11.584519 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7tqmd" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="registry-server" containerID="cri-o://5dc17664cf68cf750f08205e1d92cfc708ece2a2ec92e69366c31818b9c8f81b" gracePeriod=2 Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.593772 4612 generic.go:334] "Generic (PLEG): container finished" podID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerID="5dc17664cf68cf750f08205e1d92cfc708ece2a2ec92e69366c31818b9c8f81b" exitCode=0 Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.593992 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerDied","Data":"5dc17664cf68cf750f08205e1d92cfc708ece2a2ec92e69366c31818b9c8f81b"} Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.820436 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.828438 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content\") pod \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.828527 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities\") pod \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.828764 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t26pj\" (UniqueName: \"kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj\") pod \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\" (UID: \"35ac278f-0fcd-4244-824c-13d4ec5f21d2\") " Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.829239 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities" (OuterVolumeSpecName: "utilities") pod "35ac278f-0fcd-4244-824c-13d4ec5f21d2" (UID: "35ac278f-0fcd-4244-824c-13d4ec5f21d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.829368 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.842097 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj" (OuterVolumeSpecName: "kube-api-access-t26pj") pod "35ac278f-0fcd-4244-824c-13d4ec5f21d2" (UID: "35ac278f-0fcd-4244-824c-13d4ec5f21d2"). InnerVolumeSpecName "kube-api-access-t26pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.930766 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t26pj\" (UniqueName: \"kubernetes.io/projected/35ac278f-0fcd-4244-824c-13d4ec5f21d2-kube-api-access-t26pj\") on node \"crc\" DevicePath \"\"" Dec 03 08:24:12 crc kubenswrapper[4612]: I1203 08:24:12.960046 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35ac278f-0fcd-4244-824c-13d4ec5f21d2" (UID: "35ac278f-0fcd-4244-824c-13d4ec5f21d2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.032739 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35ac278f-0fcd-4244-824c-13d4ec5f21d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.606079 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7tqmd" event={"ID":"35ac278f-0fcd-4244-824c-13d4ec5f21d2","Type":"ContainerDied","Data":"1bc41bf04fd5c97891f909a9ee175357c4ffae8f5ceda9f2ffc6b4648ad68177"} Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.606133 4612 scope.go:117] "RemoveContainer" containerID="5dc17664cf68cf750f08205e1d92cfc708ece2a2ec92e69366c31818b9c8f81b" Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.606175 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7tqmd" Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.637780 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.649373 4612 scope.go:117] "RemoveContainer" containerID="4d0ead59e0aea272b740ff3203e777cc6a547f2c0b733fc002eadb3a0c7d2d63" Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.654520 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7tqmd"] Dec 03 08:24:13 crc kubenswrapper[4612]: I1203 08:24:13.682826 4612 scope.go:117] "RemoveContainer" containerID="55ba1b97277eebe07441545ac553f5eb3c7ef2164993c795cef9799e395a6f09" Dec 03 08:24:15 crc kubenswrapper[4612]: I1203 08:24:15.099824 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" path="/var/lib/kubelet/pods/35ac278f-0fcd-4244-824c-13d4ec5f21d2/volumes" Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.135422 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.135955 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.136011 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.136934 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.137005 4612 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672" gracePeriod=600 Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.644773 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672" exitCode=0 Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.645152 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672"} Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.645179 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d"} Dec 03 08:24:17 crc kubenswrapper[4612]: I1203 08:24:17.645193 4612 scope.go:117] "RemoveContainer" containerID="93f905d848fe8e9b7ed710172bcf8477c1f16c748507164ffc104348de06b737" Dec 03 08:25:35 crc kubenswrapper[4612]: I1203 08:25:35.723334 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-7b57f75fd5-642lv" podUID="2528552f-220d-4b33-990a-7793d5d8987a" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.221025 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:44 crc kubenswrapper[4612]: E1203 08:25:44.222100 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="registry-server" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.222118 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="registry-server" Dec 03 08:25:44 crc kubenswrapper[4612]: E1203 08:25:44.222142 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="extract-utilities" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.222151 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="extract-utilities" Dec 03 08:25:44 crc kubenswrapper[4612]: E1203 08:25:44.222178 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="extract-content" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.222186 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="extract-content" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.222472 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="35ac278f-0fcd-4244-824c-13d4ec5f21d2" containerName="registry-server" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.224306 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.240221 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.240360 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.240650 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fcdp\" (UniqueName: \"kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.244160 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.343016 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fcdp\" (UniqueName: \"kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.343347 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.343385 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.343714 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.343757 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.365401 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7fcdp\" (UniqueName: \"kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp\") pod \"redhat-marketplace-szwsn\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:44 crc kubenswrapper[4612]: I1203 08:25:44.545680 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:45 crc kubenswrapper[4612]: I1203 08:25:45.025654 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:45 crc kubenswrapper[4612]: I1203 08:25:45.548238 4612 generic.go:334] "Generic (PLEG): container finished" podID="5c104a09-4470-423c-8cac-90ea9bd21797" containerID="08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8" exitCode=0 Dec 03 08:25:45 crc kubenswrapper[4612]: I1203 08:25:45.548305 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerDied","Data":"08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8"} Dec 03 08:25:45 crc kubenswrapper[4612]: I1203 08:25:45.548655 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerStarted","Data":"eb42c6c7eb1c4160bc57087fdb420ffa9e80b02c6a50bf01eb19afae6d3c299f"} Dec 03 08:25:45 crc kubenswrapper[4612]: I1203 08:25:45.549990 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:25:46 crc kubenswrapper[4612]: I1203 08:25:46.558085 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerStarted","Data":"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16"} Dec 03 08:25:47 crc kubenswrapper[4612]: I1203 08:25:47.571065 4612 generic.go:334] "Generic (PLEG): container finished" podID="5c104a09-4470-423c-8cac-90ea9bd21797" containerID="a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16" exitCode=0 Dec 03 08:25:47 crc kubenswrapper[4612]: I1203 08:25:47.571288 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerDied","Data":"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16"} Dec 03 08:25:48 crc kubenswrapper[4612]: I1203 08:25:48.580668 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerStarted","Data":"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082"} Dec 03 08:25:48 crc kubenswrapper[4612]: I1203 08:25:48.601895 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-szwsn" podStartSLOduration=2.13337871 podStartE2EDuration="4.601875537s" podCreationTimestamp="2025-12-03 08:25:44 +0000 UTC" firstStartedPulling="2025-12-03 08:25:45.549676723 +0000 UTC m=+3508.723034123" lastFinishedPulling="2025-12-03 08:25:48.01817354 +0000 UTC m=+3511.191530950" observedRunningTime="2025-12-03 08:25:48.59760557 +0000 UTC m=+3511.770962970" watchObservedRunningTime="2025-12-03 08:25:48.601875537 +0000 UTC 
m=+3511.775232937" Dec 03 08:25:54 crc kubenswrapper[4612]: I1203 08:25:54.546772 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:54 crc kubenswrapper[4612]: I1203 08:25:54.548151 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:54 crc kubenswrapper[4612]: I1203 08:25:54.597148 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:54 crc kubenswrapper[4612]: I1203 08:25:54.680561 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:54 crc kubenswrapper[4612]: I1203 08:25:54.838277 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:56 crc kubenswrapper[4612]: I1203 08:25:56.642136 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-szwsn" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="registry-server" containerID="cri-o://4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082" gracePeriod=2 Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.331782 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.461134 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fcdp\" (UniqueName: \"kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp\") pod \"5c104a09-4470-423c-8cac-90ea9bd21797\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.461291 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities\") pod \"5c104a09-4470-423c-8cac-90ea9bd21797\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.461506 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content\") pod \"5c104a09-4470-423c-8cac-90ea9bd21797\" (UID: \"5c104a09-4470-423c-8cac-90ea9bd21797\") " Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.462059 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities" (OuterVolumeSpecName: "utilities") pod "5c104a09-4470-423c-8cac-90ea9bd21797" (UID: "5c104a09-4470-423c-8cac-90ea9bd21797"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.479261 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp" (OuterVolumeSpecName: "kube-api-access-7fcdp") pod "5c104a09-4470-423c-8cac-90ea9bd21797" (UID: "5c104a09-4470-423c-8cac-90ea9bd21797"). InnerVolumeSpecName "kube-api-access-7fcdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.505008 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c104a09-4470-423c-8cac-90ea9bd21797" (UID: "5c104a09-4470-423c-8cac-90ea9bd21797"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.564834 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.564862 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c104a09-4470-423c-8cac-90ea9bd21797-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.564873 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fcdp\" (UniqueName: \"kubernetes.io/projected/5c104a09-4470-423c-8cac-90ea9bd21797-kube-api-access-7fcdp\") on node \"crc\" DevicePath \"\"" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.683087 4612 generic.go:334] "Generic (PLEG): container finished" podID="5c104a09-4470-423c-8cac-90ea9bd21797" containerID="4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082" exitCode=0 Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.683141 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerDied","Data":"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082"} Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.683167 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szwsn" event={"ID":"5c104a09-4470-423c-8cac-90ea9bd21797","Type":"ContainerDied","Data":"eb42c6c7eb1c4160bc57087fdb420ffa9e80b02c6a50bf01eb19afae6d3c299f"} Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.683183 4612 scope.go:117] "RemoveContainer" containerID="4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.683358 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szwsn" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.747148 4612 scope.go:117] "RemoveContainer" containerID="a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.763792 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.772402 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-szwsn"] Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.783954 4612 scope.go:117] "RemoveContainer" containerID="08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.833833 4612 scope.go:117] "RemoveContainer" containerID="4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082" Dec 03 08:25:57 crc kubenswrapper[4612]: E1203 08:25:57.836831 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082\": container with ID starting with 4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082 not found: ID does not exist" containerID="4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.836862 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082"} err="failed to get container status \"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082\": rpc error: code = NotFound desc = could not find container \"4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082\": container with ID starting with 4607f09b1aeb0d0261755a8f1b260e2a729f5d99eb7e878f28ddd0aeb4b21082 not found: ID does not exist" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.836885 4612 scope.go:117] "RemoveContainer" containerID="a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16" Dec 03 08:25:57 crc kubenswrapper[4612]: E1203 08:25:57.838160 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16\": container with ID starting with a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16 not found: ID does not exist" containerID="a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.838214 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16"} err="failed to get container status \"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16\": rpc error: code = NotFound desc = could not find container \"a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16\": container with ID starting with a0c78be44cdacc081a1901ffe4c7e5b59bd55a1a837841a1a2ac98bee29a9f16 not found: ID does not exist" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.838250 4612 scope.go:117] "RemoveContainer" containerID="08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8" Dec 03 08:25:57 crc kubenswrapper[4612]: E1203 08:25:57.843238 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8\": container with ID starting with 08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8 not found: ID does not exist" containerID="08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8" Dec 03 08:25:57 crc kubenswrapper[4612]: I1203 08:25:57.843282 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8"} err="failed to get container status \"08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8\": rpc error: code = NotFound desc = could not find container \"08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8\": container with ID starting with 08ba208607c5c0c18851889082a50379bc24d779515b46abe7d122f25ddb90d8 not found: ID does not exist" Dec 03 08:25:59 crc kubenswrapper[4612]: I1203 08:25:59.108508 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" path="/var/lib/kubelet/pods/5c104a09-4470-423c-8cac-90ea9bd21797/volumes" Dec 03 08:26:17 crc kubenswrapper[4612]: I1203 08:26:17.135875 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:26:17 crc kubenswrapper[4612]: I1203 08:26:17.136431 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:26:47 crc kubenswrapper[4612]: I1203 08:26:47.136319 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:26:47 crc kubenswrapper[4612]: I1203 08:26:47.136981 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.136056 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.136606 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.136649 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.137326 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.137380 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" gracePeriod=600 Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.447731 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" exitCode=0 Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.447780 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d"} Dec 03 08:27:17 crc kubenswrapper[4612]: I1203 08:27:17.447821 4612 scope.go:117] "RemoveContainer" containerID="e040080346336d3b4fa4b76cff5fe18d19bbbd6d5e2505592d6ddada3dbb9672" Dec 03 08:27:17 crc kubenswrapper[4612]: E1203 08:27:17.631394 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:27:18 crc kubenswrapper[4612]: I1203 08:27:18.460374 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:27:18 crc kubenswrapper[4612]: E1203 08:27:18.461770 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:27:33 crc kubenswrapper[4612]: I1203 08:27:33.093058 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:27:33 crc kubenswrapper[4612]: E1203 08:27:33.093789 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:27:44 crc 
kubenswrapper[4612]: I1203 08:27:44.090586 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:27:44 crc kubenswrapper[4612]: E1203 08:27:44.091410 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:27:55 crc kubenswrapper[4612]: I1203 08:27:55.911182 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:27:55 crc kubenswrapper[4612]: E1203 08:27:55.912503 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:28:09 crc kubenswrapper[4612]: I1203 08:28:09.089996 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:28:09 crc kubenswrapper[4612]: E1203 08:28:09.101803 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:28:23 crc kubenswrapper[4612]: I1203 08:28:23.090311 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:28:23 crc kubenswrapper[4612]: E1203 08:28:23.091712 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:28:36 crc kubenswrapper[4612]: I1203 08:28:36.089255 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:28:36 crc kubenswrapper[4612]: E1203 08:28:36.089837 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:28:50 crc kubenswrapper[4612]: I1203 08:28:50.090654 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:28:50 crc 
kubenswrapper[4612]: E1203 08:28:50.092096 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:29:03 crc kubenswrapper[4612]: I1203 08:29:03.102720 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:29:03 crc kubenswrapper[4612]: E1203 08:29:03.107476 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:29:15 crc kubenswrapper[4612]: I1203 08:29:15.090000 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:29:15 crc kubenswrapper[4612]: E1203 08:29:15.090776 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:29:29 crc kubenswrapper[4612]: I1203 08:29:29.090123 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:29:29 crc kubenswrapper[4612]: E1203 08:29:29.092461 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:29:40 crc kubenswrapper[4612]: I1203 08:29:40.090859 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:29:40 crc kubenswrapper[4612]: E1203 08:29:40.093385 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:29:54 crc kubenswrapper[4612]: I1203 08:29:54.090236 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:29:54 crc kubenswrapper[4612]: E1203 08:29:54.091103 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.185026 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr"] Dec 03 08:30:00 crc kubenswrapper[4612]: E1203 08:30:00.185829 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="extract-content" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.185904 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="extract-content" Dec 03 08:30:00 crc kubenswrapper[4612]: E1203 08:30:00.185915 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="extract-utilities" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.185922 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="extract-utilities" Dec 03 08:30:00 crc kubenswrapper[4612]: E1203 08:30:00.185975 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="registry-server" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.185982 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="registry-server" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.186158 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c104a09-4470-423c-8cac-90ea9bd21797" containerName="registry-server" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.186766 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.189745 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.190328 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.203294 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr"] Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.329279 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.329669 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.329817 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp4mg\" (UniqueName: \"kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.431632 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.431714 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp4mg\" (UniqueName: \"kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.431825 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.432803 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume\") pod 
\"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.449537 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.450680 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp4mg\" (UniqueName: \"kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg\") pod \"collect-profiles-29412510-s6wwr\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:00 crc kubenswrapper[4612]: I1203 08:30:00.547291 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:01 crc kubenswrapper[4612]: I1203 08:30:01.018568 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr"] Dec 03 08:30:01 crc kubenswrapper[4612]: I1203 08:30:01.133070 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" event={"ID":"8123b5d3-f0fb-4fff-a528-5e67d981d28c","Type":"ContainerStarted","Data":"f5a781732b1431baaeab2ffcf9abcd6a38acd5225c7c5e60f6761a66c746a0a2"} Dec 03 08:30:02 crc kubenswrapper[4612]: I1203 08:30:02.153741 4612 generic.go:334] "Generic (PLEG): container finished" podID="8123b5d3-f0fb-4fff-a528-5e67d981d28c" containerID="f17d2eb4da24c23602d0b9019d39d2dfdd2c8bae0a7b5527d59f5885db3bd401" exitCode=0 Dec 03 08:30:02 crc kubenswrapper[4612]: I1203 08:30:02.153849 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" event={"ID":"8123b5d3-f0fb-4fff-a528-5e67d981d28c","Type":"ContainerDied","Data":"f17d2eb4da24c23602d0b9019d39d2dfdd2c8bae0a7b5527d59f5885db3bd401"} Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.674581 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.799009 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume\") pod \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.799192 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume\") pod \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.799250 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp4mg\" (UniqueName: \"kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg\") pod \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\" (UID: \"8123b5d3-f0fb-4fff-a528-5e67d981d28c\") " Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.799921 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume" (OuterVolumeSpecName: "config-volume") pod "8123b5d3-f0fb-4fff-a528-5e67d981d28c" (UID: "8123b5d3-f0fb-4fff-a528-5e67d981d28c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.804672 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8123b5d3-f0fb-4fff-a528-5e67d981d28c" (UID: "8123b5d3-f0fb-4fff-a528-5e67d981d28c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.808171 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg" (OuterVolumeSpecName: "kube-api-access-rp4mg") pod "8123b5d3-f0fb-4fff-a528-5e67d981d28c" (UID: "8123b5d3-f0fb-4fff-a528-5e67d981d28c"). InnerVolumeSpecName "kube-api-access-rp4mg". 
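[annotation] The reconciler_common lines trace the volume manager reconciling desired state (volumes pods still need) against actual state (volumes currently mounted): once the collect-profiles pod is deleted, its three volumes move from mounted to unmounted to detached. A simplified sketch of that loop, assuming plain maps in place of kubelet's state caches:

package main

import "fmt"

// reconcile drives actual toward desired, as kubelet's volume manager does.
func reconcile(desired, actual map[string]bool) {
	// Unmount anything still mounted that no pod wants anymore
	// (the UnmountVolume.TearDown lines above).
	for vol := range actual {
		if !desired[vol] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", vol)
			delete(actual, vol)
			fmt.Printf("Volume detached for volume %q\n", vol)
		}
	}
	// Mount anything desired that is not mounted yet
	// (the MountVolume.SetUp lines earlier in the log).
	for vol := range desired {
		if !actual[vol] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", vol)
			actual[vol] = true
		}
	}
}

func main() {
	actual := map[string]bool{"config-volume": true, "secret-volume": true, "kube-api-access-rp4mg": true}
	reconcile(map[string]bool{}, actual) // pod deleted: desired state is empty
}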
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.901072 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp4mg\" (UniqueName: \"kubernetes.io/projected/8123b5d3-f0fb-4fff-a528-5e67d981d28c-kube-api-access-rp4mg\") on node \"crc\" DevicePath \"\"" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.901104 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8123b5d3-f0fb-4fff-a528-5e67d981d28c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:30:03 crc kubenswrapper[4612]: I1203 08:30:03.901115 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8123b5d3-f0fb-4fff-a528-5e67d981d28c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:30:04 crc kubenswrapper[4612]: I1203 08:30:04.177073 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" event={"ID":"8123b5d3-f0fb-4fff-a528-5e67d981d28c","Type":"ContainerDied","Data":"f5a781732b1431baaeab2ffcf9abcd6a38acd5225c7c5e60f6761a66c746a0a2"} Dec 03 08:30:04 crc kubenswrapper[4612]: I1203 08:30:04.177107 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5a781732b1431baaeab2ffcf9abcd6a38acd5225c7c5e60f6761a66c746a0a2" Dec 03 08:30:04 crc kubenswrapper[4612]: I1203 08:30:04.177122 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-s6wwr" Dec 03 08:30:04 crc kubenswrapper[4612]: I1203 08:30:04.785357 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg"] Dec 03 08:30:04 crc kubenswrapper[4612]: I1203 08:30:04.790849 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412465-r9brg"] Dec 03 08:30:05 crc kubenswrapper[4612]: I1203 08:30:05.110822 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b731854-5aa4-4cfd-b4aa-ea210d24c3a6" path="/var/lib/kubelet/pods/5b731854-5aa4-4cfd-b4aa-ea210d24c3a6/volumes" Dec 03 08:30:06 crc kubenswrapper[4612]: I1203 08:30:06.089697 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:30:06 crc kubenswrapper[4612]: E1203 08:30:06.090969 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:30:20 crc kubenswrapper[4612]: I1203 08:30:20.089588 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:30:20 crc kubenswrapper[4612]: E1203 08:30:20.090413 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:30:32 crc kubenswrapper[4612]: I1203 08:30:32.089547 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:30:32 crc kubenswrapper[4612]: E1203 08:30:32.090476 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:30:45 crc kubenswrapper[4612]: I1203 08:30:45.089536 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:30:45 crc kubenswrapper[4612]: E1203 08:30:45.091149 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:30:46 crc kubenswrapper[4612]: I1203 08:30:46.283533 4612 scope.go:117] "RemoveContainer" containerID="2817990cf1a9567157bc4783c7855485777f1e5b7e75a8f4cc6ac1daca9e0b60" Dec 03 08:30:59 crc kubenswrapper[4612]: I1203 08:30:59.089700 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:30:59 crc kubenswrapper[4612]: E1203 08:30:59.090796 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:31:13 crc kubenswrapper[4612]: I1203 08:31:13.090114 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:31:13 crc kubenswrapper[4612]: E1203 08:31:13.090980 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:31:25 crc kubenswrapper[4612]: I1203 08:31:25.089266 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:31:25 crc kubenswrapper[4612]: E1203 08:31:25.090237 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:31:36 crc kubenswrapper[4612]: I1203 08:31:36.089628 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:31:36 crc kubenswrapper[4612]: E1203 08:31:36.090374 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:31:47 crc kubenswrapper[4612]: I1203 08:31:47.097147 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:31:47 crc kubenswrapper[4612]: E1203 08:31:47.097964 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:32:02 crc kubenswrapper[4612]: I1203 08:32:02.090225 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:32:02 crc kubenswrapper[4612]: E1203 08:32:02.092741 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:32:15 crc kubenswrapper[4612]: I1203 08:32:15.089689 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:32:15 crc kubenswrapper[4612]: E1203 08:32:15.090504 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:32:28 crc kubenswrapper[4612]: I1203 08:32:28.089378 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:32:28 crc kubenswrapper[4612]: I1203 08:32:28.516216 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd"} Dec 03 08:34:47 crc kubenswrapper[4612]: I1203 08:34:47.135437 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:34:47 crc kubenswrapper[4612]: I1203 08:34:47.135929 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.260634 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:34:59 crc kubenswrapper[4612]: E1203 08:34:59.261549 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8123b5d3-f0fb-4fff-a528-5e67d981d28c" containerName="collect-profiles" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.261563 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8123b5d3-f0fb-4fff-a528-5e67d981d28c" containerName="collect-profiles" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.261743 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8123b5d3-f0fb-4fff-a528-5e67d981d28c" containerName="collect-profiles" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.263012 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.283181 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.415495 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.415560 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p9nb\" (UniqueName: \"kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.415626 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.517438 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.517601 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content\") pod \"redhat-operators-ll5dl\" 
(UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.517658 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p9nb\" (UniqueName: \"kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.518087 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.518096 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.550021 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p9nb\" (UniqueName: \"kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb\") pod \"redhat-operators-ll5dl\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:34:59 crc kubenswrapper[4612]: I1203 08:34:59.590784 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:00 crc kubenswrapper[4612]: I1203 08:35:00.134302 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:35:00 crc kubenswrapper[4612]: I1203 08:35:00.854692 4612 generic.go:334] "Generic (PLEG): container finished" podID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerID="70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769" exitCode=0 Dec 03 08:35:00 crc kubenswrapper[4612]: I1203 08:35:00.854866 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerDied","Data":"70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769"} Dec 03 08:35:00 crc kubenswrapper[4612]: I1203 08:35:00.856427 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerStarted","Data":"ef91d71270e380859e7e3d2ed7c26480f6bcc2d5db9ac7df3fdde5637a60b38e"} Dec 03 08:35:00 crc kubenswrapper[4612]: I1203 08:35:00.857560 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:35:01 crc kubenswrapper[4612]: I1203 08:35:01.865756 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerStarted","Data":"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1"} Dec 03 08:35:04 crc kubenswrapper[4612]: I1203 08:35:04.930417 4612 generic.go:334] "Generic (PLEG): container finished" 
podID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerID="38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1" exitCode=0 Dec 03 08:35:04 crc kubenswrapper[4612]: I1203 08:35:04.931121 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerDied","Data":"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1"} Dec 03 08:35:05 crc kubenswrapper[4612]: I1203 08:35:05.940898 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerStarted","Data":"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99"} Dec 03 08:35:05 crc kubenswrapper[4612]: I1203 08:35:05.968223 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ll5dl" podStartSLOduration=2.2525667719999998 podStartE2EDuration="6.968201546s" podCreationTimestamp="2025-12-03 08:34:59 +0000 UTC" firstStartedPulling="2025-12-03 08:35:00.857342066 +0000 UTC m=+4064.030699466" lastFinishedPulling="2025-12-03 08:35:05.57297684 +0000 UTC m=+4068.746334240" observedRunningTime="2025-12-03 08:35:05.961588943 +0000 UTC m=+4069.134946353" watchObservedRunningTime="2025-12-03 08:35:05.968201546 +0000 UTC m=+4069.141558946" Dec 03 08:35:09 crc kubenswrapper[4612]: I1203 08:35:09.591598 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:09 crc kubenswrapper[4612]: I1203 08:35:09.592105 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:10 crc kubenswrapper[4612]: I1203 08:35:10.638834 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ll5dl" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="registry-server" probeResult="failure" output=< Dec 03 08:35:10 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 08:35:10 crc kubenswrapper[4612]: > Dec 03 08:35:17 crc kubenswrapper[4612]: I1203 08:35:17.136382 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:35:17 crc kubenswrapper[4612]: I1203 08:35:17.136912 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:35:19 crc kubenswrapper[4612]: I1203 08:35:19.644471 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:19 crc kubenswrapper[4612]: I1203 08:35:19.722657 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:19 crc kubenswrapper[4612]: I1203 08:35:19.886373 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:35:21 crc kubenswrapper[4612]: I1203 
08:35:21.098459 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ll5dl" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="registry-server" containerID="cri-o://5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99" gracePeriod=2 Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.002482 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.109222 4612 generic.go:334] "Generic (PLEG): container finished" podID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerID="5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99" exitCode=0 Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.109263 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerDied","Data":"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99"} Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.109296 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ll5dl" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.109321 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ll5dl" event={"ID":"1caa93d0-5bbc-49c0-be42-86707d74e591","Type":"ContainerDied","Data":"ef91d71270e380859e7e3d2ed7c26480f6bcc2d5db9ac7df3fdde5637a60b38e"} Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.109345 4612 scope.go:117] "RemoveContainer" containerID="5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.133094 4612 scope.go:117] "RemoveContainer" containerID="38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.148514 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities\") pod \"1caa93d0-5bbc-49c0-be42-86707d74e591\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.148710 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p9nb\" (UniqueName: \"kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb\") pod \"1caa93d0-5bbc-49c0-be42-86707d74e591\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.148829 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content\") pod \"1caa93d0-5bbc-49c0-be42-86707d74e591\" (UID: \"1caa93d0-5bbc-49c0-be42-86707d74e591\") " Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.154601 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb" (OuterVolumeSpecName: "kube-api-access-2p9nb") pod "1caa93d0-5bbc-49c0-be42-86707d74e591" (UID: "1caa93d0-5bbc-49c0-be42-86707d74e591"). InnerVolumeSpecName "kube-api-access-2p9nb". 
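[annotation] "Killing container with a grace period ... gracePeriod=2" above is the usual TERM-then-KILL sequence: signal the container, wait up to the grace period, force-kill on expiry. A sketch of that control flow with an ordinary child process standing in for the container (Unix-only, since it uses syscall.SIGTERM; a real runtime signals the container's init process through the CRI):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM) // polite request first
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		fmt.Println("grace period expired, sending SIGKILL")
		cmd.Process.Kill()
		<-done // reap the process after the forced kill
	}
}

func main() {
	// sleep exits promptly on SIGTERM, so this demo takes the graceful path.
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second)
}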
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.165853 4612 scope.go:117] "RemoveContainer" containerID="70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.169404 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities" (OuterVolumeSpecName: "utilities") pod "1caa93d0-5bbc-49c0-be42-86707d74e591" (UID: "1caa93d0-5bbc-49c0-be42-86707d74e591"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.240132 4612 scope.go:117] "RemoveContainer" containerID="5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99" Dec 03 08:35:22 crc kubenswrapper[4612]: E1203 08:35:22.240595 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99\": container with ID starting with 5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99 not found: ID does not exist" containerID="5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.240653 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99"} err="failed to get container status \"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99\": rpc error: code = NotFound desc = could not find container \"5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99\": container with ID starting with 5296aa780b6053f7662eddce28184c393ffe60dd4a1bc969a1318e0cdd082b99 not found: ID does not exist" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.240693 4612 scope.go:117] "RemoveContainer" containerID="38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1" Dec 03 08:35:22 crc kubenswrapper[4612]: E1203 08:35:22.241098 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1\": container with ID starting with 38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1 not found: ID does not exist" containerID="38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.241139 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1"} err="failed to get container status \"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1\": rpc error: code = NotFound desc = could not find container \"38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1\": container with ID starting with 38b56bacc40555491e9e902e6c3584fcedb557db375c5b299d77dba1789258e1 not found: ID does not exist" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.241169 4612 scope.go:117] "RemoveContainer" containerID="70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769" Dec 03 08:35:22 crc kubenswrapper[4612]: E1203 08:35:22.241381 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769\": container with ID starting with 70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769 not found: ID does not exist" containerID="70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.241403 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769"} err="failed to get container status \"70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769\": rpc error: code = NotFound desc = could not find container \"70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769\": container with ID starting with 70e5fbf11bf8794c53e6bd055586364f7367c5c9e767ae046b0c4a0fa5050769 not found: ID does not exist" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.251844 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p9nb\" (UniqueName: \"kubernetes.io/projected/1caa93d0-5bbc-49c0-be42-86707d74e591-kube-api-access-2p9nb\") on node \"crc\" DevicePath \"\"" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.251875 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.260828 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1caa93d0-5bbc-49c0-be42-86707d74e591" (UID: "1caa93d0-5bbc-49c0-be42-86707d74e591"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.353421 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1caa93d0-5bbc-49c0-be42-86707d74e591-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.444086 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:35:22 crc kubenswrapper[4612]: I1203 08:35:22.452263 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ll5dl"] Dec 03 08:35:23 crc kubenswrapper[4612]: I1203 08:35:23.102667 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" path="/var/lib/kubelet/pods/1caa93d0-5bbc-49c0-be42-86707d74e591/volumes" Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.136074 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.136598 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.136641 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.137329 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.137387 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd" gracePeriod=600 Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.438563 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd" exitCode=0 Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.438753 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd"} Dec 03 08:35:47 crc kubenswrapper[4612]: I1203 08:35:47.438974 4612 scope.go:117] "RemoveContainer" containerID="36fd590eea45cb2bb1f01ccd45321bc2a785caa746563e41cf0dc64c44ce910d" Dec 03 08:35:48 crc kubenswrapper[4612]: I1203 08:35:48.452787 4612 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44"} Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.363605 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:27 crc kubenswrapper[4612]: E1203 08:36:27.364916 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="registry-server" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.364933 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="registry-server" Dec 03 08:36:27 crc kubenswrapper[4612]: E1203 08:36:27.364991 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="extract-content" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.364999 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="extract-content" Dec 03 08:36:27 crc kubenswrapper[4612]: E1203 08:36:27.365044 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="extract-utilities" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.365076 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="extract-utilities" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.365407 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1caa93d0-5bbc-49c0-be42-86707d74e591" containerName="registry-server" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.377738 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.386504 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.433670 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.433776 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.433799 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz66x\" (UniqueName: \"kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.535371 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.535657 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.535682 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz66x\" (UniqueName: \"kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.535846 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.536160 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:27 crc kubenswrapper[4612]: I1203 08:36:27.995985 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zz66x\" (UniqueName: \"kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x\") pod \"redhat-marketplace-xvm82\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:28 crc kubenswrapper[4612]: I1203 08:36:28.015747 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:28 crc kubenswrapper[4612]: I1203 08:36:28.533188 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:28 crc kubenswrapper[4612]: I1203 08:36:28.874107 4612 generic.go:334] "Generic (PLEG): container finished" podID="31250c24-766e-4264-865c-4081c7a487dc" containerID="1b60e0b1cf53309f60fc1b88a32757c5a92a172232700a504e68d1263cd18630" exitCode=0 Dec 03 08:36:28 crc kubenswrapper[4612]: I1203 08:36:28.874161 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerDied","Data":"1b60e0b1cf53309f60fc1b88a32757c5a92a172232700a504e68d1263cd18630"} Dec 03 08:36:28 crc kubenswrapper[4612]: I1203 08:36:28.874192 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerStarted","Data":"79ba9f9b67d03fce30a61747d36fa26bb274ba7dfd8ea58419309ec92a307283"} Dec 03 08:36:30 crc kubenswrapper[4612]: I1203 08:36:30.893436 4612 generic.go:334] "Generic (PLEG): container finished" podID="31250c24-766e-4264-865c-4081c7a487dc" containerID="a1787b15e87d63435ab6d1d6a9fb6a06ee0fab702b25b55e4ca47afae8fb947e" exitCode=0 Dec 03 08:36:30 crc kubenswrapper[4612]: I1203 08:36:30.893537 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerDied","Data":"a1787b15e87d63435ab6d1d6a9fb6a06ee0fab702b25b55e4ca47afae8fb947e"} Dec 03 08:36:31 crc kubenswrapper[4612]: I1203 08:36:31.905357 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerStarted","Data":"bdd19c3e9f08908a6c831350345907ef9146e816f2a1c519d17ad21ab4b1ae53"} Dec 03 08:36:31 crc kubenswrapper[4612]: I1203 08:36:31.940116 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xvm82" podStartSLOduration=2.502418466 podStartE2EDuration="4.940093953s" podCreationTimestamp="2025-12-03 08:36:27 +0000 UTC" firstStartedPulling="2025-12-03 08:36:28.877252184 +0000 UTC m=+4152.050609594" lastFinishedPulling="2025-12-03 08:36:31.314927681 +0000 UTC m=+4154.488285081" observedRunningTime="2025-12-03 08:36:31.92543824 +0000 UTC m=+4155.098795640" watchObservedRunningTime="2025-12-03 08:36:31.940093953 +0000 UTC m=+4155.113451353" Dec 03 08:36:38 crc kubenswrapper[4612]: I1203 08:36:38.016689 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:38 crc kubenswrapper[4612]: I1203 08:36:38.017286 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:38 crc kubenswrapper[4612]: I1203 08:36:38.087889 4612 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.055340 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.065128 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.067019 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.077214 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.088316 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.088381 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgj25\" (UniqueName: \"kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.088414 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.193148 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.195451 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgj25\" (UniqueName: \"kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.195506 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.195298 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content\") pod \"certified-operators-5nhgx\" (UID: 
\"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.195989 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.222297 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgj25\" (UniqueName: \"kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25\") pod \"certified-operators-5nhgx\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.393888 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.470359 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.475120 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.504427 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.504762 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.504800 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfxn7\" (UniqueName: \"kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.529995 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.608023 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.608068 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfxn7\" (UniqueName: \"kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7\") pod \"community-operators-b8m6z\" 
(UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.608124 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.608530 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.608740 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.663284 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfxn7\" (UniqueName: \"kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7\") pod \"community-operators-b8m6z\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:39 crc kubenswrapper[4612]: I1203 08:36:39.831822 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:40 crc kubenswrapper[4612]: I1203 08:36:40.087967 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:40 crc kubenswrapper[4612]: I1203 08:36:40.421059 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:40 crc kubenswrapper[4612]: W1203 08:36:40.429902 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1629733a_fe08_499d_b100_d92519de0a5c.slice/crio-e8c018d6f253207d2955f4f3a3a91f7e2a92a21719884369a25c9efb43eddc67 WatchSource:0}: Error finding container e8c018d6f253207d2955f4f3a3a91f7e2a92a21719884369a25c9efb43eddc67: Status 404 returned error can't find the container with id e8c018d6f253207d2955f4f3a3a91f7e2a92a21719884369a25c9efb43eddc67 Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.027849 4612 generic.go:334] "Generic (PLEG): container finished" podID="1629733a-fe08-499d-b100-d92519de0a5c" containerID="2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173" exitCode=0 Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.029386 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerDied","Data":"2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173"} Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.029431 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" 
event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerStarted","Data":"e8c018d6f253207d2955f4f3a3a91f7e2a92a21719884369a25c9efb43eddc67"} Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.036260 4612 generic.go:334] "Generic (PLEG): container finished" podID="3291f04a-d222-4d3c-bc04-de160793563e" containerID="69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4" exitCode=0 Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.036311 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerDied","Data":"69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4"} Dec 03 08:36:41 crc kubenswrapper[4612]: I1203 08:36:41.036335 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerStarted","Data":"8c04782c7821631609624c8ed8fbc6631e7f03f27f290ac935816aaafe8db9bc"} Dec 03 08:36:42 crc kubenswrapper[4612]: I1203 08:36:42.050681 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerStarted","Data":"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7"} Dec 03 08:36:43 crc kubenswrapper[4612]: I1203 08:36:43.061323 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerStarted","Data":"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe"} Dec 03 08:36:43 crc kubenswrapper[4612]: I1203 08:36:43.064287 4612 generic.go:334] "Generic (PLEG): container finished" podID="1629733a-fe08-499d-b100-d92519de0a5c" containerID="1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7" exitCode=0 Dec 03 08:36:43 crc kubenswrapper[4612]: I1203 08:36:43.064482 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerDied","Data":"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7"} Dec 03 08:36:44 crc kubenswrapper[4612]: I1203 08:36:44.075349 4612 generic.go:334] "Generic (PLEG): container finished" podID="3291f04a-d222-4d3c-bc04-de160793563e" containerID="b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe" exitCode=0 Dec 03 08:36:44 crc kubenswrapper[4612]: I1203 08:36:44.075523 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerDied","Data":"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe"} Dec 03 08:36:44 crc kubenswrapper[4612]: I1203 08:36:44.259220 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:44 crc kubenswrapper[4612]: I1203 08:36:44.259708 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xvm82" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="registry-server" containerID="cri-o://bdd19c3e9f08908a6c831350345907ef9146e816f2a1c519d17ad21ab4b1ae53" gracePeriod=2 Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.089010 4612 generic.go:334] "Generic (PLEG): container finished" 
podID="31250c24-766e-4264-865c-4081c7a487dc" containerID="bdd19c3e9f08908a6c831350345907ef9146e816f2a1c519d17ad21ab4b1ae53" exitCode=0 Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.102103 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerDied","Data":"bdd19c3e9f08908a6c831350345907ef9146e816f2a1c519d17ad21ab4b1ae53"} Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.102348 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerStarted","Data":"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6"} Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.102420 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerStarted","Data":"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a"} Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.121653 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5nhgx" podStartSLOduration=2.700255415 podStartE2EDuration="6.121635925s" podCreationTimestamp="2025-12-03 08:36:39 +0000 UTC" firstStartedPulling="2025-12-03 08:36:41.038315108 +0000 UTC m=+4164.211672508" lastFinishedPulling="2025-12-03 08:36:44.459695618 +0000 UTC m=+4167.633053018" observedRunningTime="2025-12-03 08:36:45.118719184 +0000 UTC m=+4168.292076584" watchObservedRunningTime="2025-12-03 08:36:45.121635925 +0000 UTC m=+4168.294993345" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.169556 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b8m6z" podStartSLOduration=3.300719253 podStartE2EDuration="6.169536996s" podCreationTimestamp="2025-12-03 08:36:39 +0000 UTC" firstStartedPulling="2025-12-03 08:36:41.030572881 +0000 UTC m=+4164.203930281" lastFinishedPulling="2025-12-03 08:36:43.899390614 +0000 UTC m=+4167.072748024" observedRunningTime="2025-12-03 08:36:45.157265711 +0000 UTC m=+4168.330623111" watchObservedRunningTime="2025-12-03 08:36:45.169536996 +0000 UTC m=+4168.342894396" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.407526 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.591035 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities\") pod \"31250c24-766e-4264-865c-4081c7a487dc\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.591082 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content\") pod \"31250c24-766e-4264-865c-4081c7a487dc\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.591276 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz66x\" (UniqueName: \"kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x\") pod \"31250c24-766e-4264-865c-4081c7a487dc\" (UID: \"31250c24-766e-4264-865c-4081c7a487dc\") " Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.595012 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities" (OuterVolumeSpecName: "utilities") pod "31250c24-766e-4264-865c-4081c7a487dc" (UID: "31250c24-766e-4264-865c-4081c7a487dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.604756 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x" (OuterVolumeSpecName: "kube-api-access-zz66x") pod "31250c24-766e-4264-865c-4081c7a487dc" (UID: "31250c24-766e-4264-865c-4081c7a487dc"). InnerVolumeSpecName "kube-api-access-zz66x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.610481 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31250c24-766e-4264-865c-4081c7a487dc" (UID: "31250c24-766e-4264-865c-4081c7a487dc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.693033 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.693073 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31250c24-766e-4264-865c-4081c7a487dc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:45 crc kubenswrapper[4612]: I1203 08:36:45.693086 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz66x\" (UniqueName: \"kubernetes.io/projected/31250c24-766e-4264-865c-4081c7a487dc-kube-api-access-zz66x\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.107874 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvm82" event={"ID":"31250c24-766e-4264-865c-4081c7a487dc","Type":"ContainerDied","Data":"79ba9f9b67d03fce30a61747d36fa26bb274ba7dfd8ea58419309ec92a307283"} Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.108917 4612 scope.go:117] "RemoveContainer" containerID="bdd19c3e9f08908a6c831350345907ef9146e816f2a1c519d17ad21ab4b1ae53" Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.108157 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvm82" Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.137449 4612 scope.go:117] "RemoveContainer" containerID="a1787b15e87d63435ab6d1d6a9fb6a06ee0fab702b25b55e4ca47afae8fb947e" Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.138166 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.155603 4612 scope.go:117] "RemoveContainer" containerID="1b60e0b1cf53309f60fc1b88a32757c5a92a172232700a504e68d1263cd18630" Dec 03 08:36:46 crc kubenswrapper[4612]: I1203 08:36:46.164437 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvm82"] Dec 03 08:36:47 crc kubenswrapper[4612]: I1203 08:36:47.107656 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31250c24-766e-4264-865c-4081c7a487dc" path="/var/lib/kubelet/pods/31250c24-766e-4264-865c-4081c7a487dc/volumes" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.394222 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.395169 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.444094 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.832917 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.833000 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:49 crc kubenswrapper[4612]: I1203 08:36:49.894926 4612 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:50 crc kubenswrapper[4612]: I1203 08:36:50.196163 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:50 crc kubenswrapper[4612]: I1203 08:36:50.209256 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.255578 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.256037 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5nhgx" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="registry-server" containerID="cri-o://7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6" gracePeriod=2 Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.785259 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.936915 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content\") pod \"3291f04a-d222-4d3c-bc04-de160793563e\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.937046 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgj25\" (UniqueName: \"kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25\") pod \"3291f04a-d222-4d3c-bc04-de160793563e\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.937098 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities\") pod \"3291f04a-d222-4d3c-bc04-de160793563e\" (UID: \"3291f04a-d222-4d3c-bc04-de160793563e\") " Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.937968 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities" (OuterVolumeSpecName: "utilities") pod "3291f04a-d222-4d3c-bc04-de160793563e" (UID: "3291f04a-d222-4d3c-bc04-de160793563e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.946154 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25" (OuterVolumeSpecName: "kube-api-access-vgj25") pod "3291f04a-d222-4d3c-bc04-de160793563e" (UID: "3291f04a-d222-4d3c-bc04-de160793563e"). InnerVolumeSpecName "kube-api-access-vgj25". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:36:52 crc kubenswrapper[4612]: I1203 08:36:52.990837 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3291f04a-d222-4d3c-bc04-de160793563e" (UID: "3291f04a-d222-4d3c-bc04-de160793563e"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.039658 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgj25\" (UniqueName: \"kubernetes.io/projected/3291f04a-d222-4d3c-bc04-de160793563e-kube-api-access-vgj25\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.039696 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.039707 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3291f04a-d222-4d3c-bc04-de160793563e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.179369 4612 generic.go:334] "Generic (PLEG): container finished" podID="3291f04a-d222-4d3c-bc04-de160793563e" containerID="7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6" exitCode=0 Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.179423 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerDied","Data":"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6"} Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.179456 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nhgx" event={"ID":"3291f04a-d222-4d3c-bc04-de160793563e","Type":"ContainerDied","Data":"8c04782c7821631609624c8ed8fbc6631e7f03f27f290ac935816aaafe8db9bc"} Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.179477 4612 scope.go:117] "RemoveContainer" containerID="7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.179619 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5nhgx" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.205060 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.209574 4612 scope.go:117] "RemoveContainer" containerID="b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.213935 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5nhgx"] Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.657309 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.657567 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b8m6z" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="registry-server" containerID="cri-o://6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a" gracePeriod=2 Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.906477 4612 scope.go:117] "RemoveContainer" containerID="69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.955795 4612 scope.go:117] "RemoveContainer" containerID="7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6" Dec 03 08:36:53 crc kubenswrapper[4612]: E1203 08:36:53.956626 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6\": container with ID starting with 7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6 not found: ID does not exist" containerID="7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.956686 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6"} err="failed to get container status \"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6\": rpc error: code = NotFound desc = could not find container \"7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6\": container with ID starting with 7e734d4f63771644ac149b2bf286607aa07ff32334266385e93e41969e022fb6 not found: ID does not exist" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.956720 4612 scope.go:117] "RemoveContainer" containerID="b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe" Dec 03 08:36:53 crc kubenswrapper[4612]: E1203 08:36:53.957357 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe\": container with ID starting with b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe not found: ID does not exist" containerID="b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.957420 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe"} err="failed to get container status \"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe\": rpc 
error: code = NotFound desc = could not find container \"b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe\": container with ID starting with b4362fe4f0f54ca723c26b4c1ab459bd67e76b4234a8ab370f1f4a1413043cfe not found: ID does not exist" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.957468 4612 scope.go:117] "RemoveContainer" containerID="69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4" Dec 03 08:36:53 crc kubenswrapper[4612]: E1203 08:36:53.957785 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4\": container with ID starting with 69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4 not found: ID does not exist" containerID="69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4" Dec 03 08:36:53 crc kubenswrapper[4612]: I1203 08:36:53.957803 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4"} err="failed to get container status \"69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4\": rpc error: code = NotFound desc = could not find container \"69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4\": container with ID starting with 69222b5de348a24a26ca82710e269e981bb0e68ba31af6385f7e551e4dca3da4 not found: ID does not exist" Dec 03 08:36:54 crc kubenswrapper[4612]: I1203 08:36:54.939666 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.099180 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3291f04a-d222-4d3c-bc04-de160793563e" path="/var/lib/kubelet/pods/3291f04a-d222-4d3c-bc04-de160793563e/volumes" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.110330 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities\") pod \"1629733a-fe08-499d-b100-d92519de0a5c\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.110393 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfxn7\" (UniqueName: \"kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7\") pod \"1629733a-fe08-499d-b100-d92519de0a5c\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.110508 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content\") pod \"1629733a-fe08-499d-b100-d92519de0a5c\" (UID: \"1629733a-fe08-499d-b100-d92519de0a5c\") " Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.116703 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities" (OuterVolumeSpecName: "utilities") pod "1629733a-fe08-499d-b100-d92519de0a5c" (UID: "1629733a-fe08-499d-b100-d92519de0a5c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.157540 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1629733a-fe08-499d-b100-d92519de0a5c" (UID: "1629733a-fe08-499d-b100-d92519de0a5c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.212422 4612 generic.go:334] "Generic (PLEG): container finished" podID="1629733a-fe08-499d-b100-d92519de0a5c" containerID="6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a" exitCode=0 Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.212471 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerDied","Data":"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a"} Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.212503 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8m6z" event={"ID":"1629733a-fe08-499d-b100-d92519de0a5c","Type":"ContainerDied","Data":"e8c018d6f253207d2955f4f3a3a91f7e2a92a21719884369a25c9efb43eddc67"} Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.212521 4612 scope.go:117] "RemoveContainer" containerID="6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.212658 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8m6z" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.213137 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.213167 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1629733a-fe08-499d-b100-d92519de0a5c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.238842 4612 scope.go:117] "RemoveContainer" containerID="1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.605736 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7" (OuterVolumeSpecName: "kube-api-access-cfxn7") pod "1629733a-fe08-499d-b100-d92519de0a5c" (UID: "1629733a-fe08-499d-b100-d92519de0a5c"). InnerVolumeSpecName "kube-api-access-cfxn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.621914 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfxn7\" (UniqueName: \"kubernetes.io/projected/1629733a-fe08-499d-b100-d92519de0a5c-kube-api-access-cfxn7\") on node \"crc\" DevicePath \"\"" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.625589 4612 scope.go:117] "RemoveContainer" containerID="2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.697130 4612 scope.go:117] "RemoveContainer" containerID="6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a" Dec 03 08:36:55 crc kubenswrapper[4612]: E1203 08:36:55.697966 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a\": container with ID starting with 6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a not found: ID does not exist" containerID="6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.697995 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a"} err="failed to get container status \"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a\": rpc error: code = NotFound desc = could not find container \"6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a\": container with ID starting with 6af2313d65dc19e5e4e482ca301bf7cbbdfe0f601c769963e030706c21b1d55a not found: ID does not exist" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.698015 4612 scope.go:117] "RemoveContainer" containerID="1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7" Dec 03 08:36:55 crc kubenswrapper[4612]: E1203 08:36:55.698326 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7\": container with ID starting with 1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7 not found: ID does not exist" containerID="1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.698346 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7"} err="failed to get container status \"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7\": rpc error: code = NotFound desc = could not find container \"1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7\": container with ID starting with 1ad4226348bdc55eb6949411da7c71e1f267bfc6e8645513b8a9a8767c0265f7 not found: ID does not exist" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.698358 4612 scope.go:117] "RemoveContainer" containerID="2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173" Dec 03 08:36:55 crc kubenswrapper[4612]: E1203 08:36:55.698716 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173\": container with ID starting with 2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173 not found: ID does not 
exist" containerID="2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173" Dec 03 08:36:55 crc kubenswrapper[4612]: I1203 08:36:55.698736 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173"} err="failed to get container status \"2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173\": rpc error: code = NotFound desc = could not find container \"2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173\": container with ID starting with 2ad3d55b391a1eeede50359d46181c667d1cebb2f11f784fd9fb8bf5c720b173 not found: ID does not exist" Dec 03 08:36:56 crc kubenswrapper[4612]: I1203 08:36:56.319843 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:56 crc kubenswrapper[4612]: I1203 08:36:56.336153 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b8m6z"] Dec 03 08:36:57 crc kubenswrapper[4612]: I1203 08:36:57.107371 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1629733a-fe08-499d-b100-d92519de0a5c" path="/var/lib/kubelet/pods/1629733a-fe08-499d-b100-d92519de0a5c/volumes" Dec 03 08:37:47 crc kubenswrapper[4612]: I1203 08:37:47.135926 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:37:47 crc kubenswrapper[4612]: I1203 08:37:47.136691 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:38:17 crc kubenswrapper[4612]: I1203 08:38:17.136201 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:38:17 crc kubenswrapper[4612]: I1203 08:38:17.136853 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:38:40 crc kubenswrapper[4612]: I1203 08:38:40.799889 4612 generic.go:334] "Generic (PLEG): container finished" podID="5cc1684d-023d-46c3-8f87-3e91941a34e9" containerID="fdf713b8e5bbaeb55cb8675dd27ef780b8a12f7bafc10ab74655326163053706" exitCode=0 Dec 03 08:38:40 crc kubenswrapper[4612]: I1203 08:38:40.800565 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cc1684d-023d-46c3-8f87-3e91941a34e9","Type":"ContainerDied","Data":"fdf713b8e5bbaeb55cb8675dd27ef780b8a12f7bafc10ab74655326163053706"} Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.172665 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235493 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235542 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235654 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235679 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235716 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l54s9\" (UniqueName: \"kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235740 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235785 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235868 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.235922 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"5cc1684d-023d-46c3-8f87-3e91941a34e9\" (UID: \"5cc1684d-023d-46c3-8f87-3e91941a34e9\") " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.237868 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data" (OuterVolumeSpecName: "config-data") pod 
"5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.238016 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.245476 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.256387 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.264283 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9" (OuterVolumeSpecName: "kube-api-access-l54s9") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "kube-api-access-l54s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.267347 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.271206 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.294583 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.297568 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5cc1684d-023d-46c3-8f87-3e91941a34e9" (UID: "5cc1684d-023d-46c3-8f87-3e91941a34e9"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338091 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338126 4612 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338139 4612 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338152 4612 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cc1684d-023d-46c3-8f87-3e91941a34e9-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338164 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l54s9\" (UniqueName: \"kubernetes.io/projected/5cc1684d-023d-46c3-8f87-3e91941a34e9-kube-api-access-l54s9\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338174 4612 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338184 4612 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cc1684d-023d-46c3-8f87-3e91941a34e9-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338195 4612 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cc1684d-023d-46c3-8f87-3e91941a34e9-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.338505 4612 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.365636 4612 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.440612 4612 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.821442 4612 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cc1684d-023d-46c3-8f87-3e91941a34e9","Type":"ContainerDied","Data":"0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe"} Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.821744 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe" Dec 03 08:38:42 crc kubenswrapper[4612]: I1203 08:38:42.821519 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 08:38:42 crc kubenswrapper[4612]: E1203 08:38:42.919770 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cc1684d_023d_46c3_8f87_3e91941a34e9.slice/crio-0893af4b80fc2e23894cbf1dd2ee6555b9dbe3cbcf8764180ee3ce20d49c72fe\": RecentStats: unable to find data in memory cache]" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.137561 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.138291 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.138356 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.139558 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.139657 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" gracePeriod=600 Dec 03 08:38:47 crc kubenswrapper[4612]: E1203 08:38:47.274431 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.874162 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" exitCode=0 Dec 03 08:38:47 crc 
kubenswrapper[4612]: I1203 08:38:47.874239 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44"} Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.874295 4612 scope.go:117] "RemoveContainer" containerID="b18fba5188318493cf361f53dfcbb2b755a0924fb79a1d52a2e107d6715f35cd" Dec 03 08:38:47 crc kubenswrapper[4612]: I1203 08:38:47.875296 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:38:47 crc kubenswrapper[4612]: E1203 08:38:47.875784 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461202 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461723 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461736 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461746 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461752 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461766 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461772 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461785 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461792 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461805 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="extract-content" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461811 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="extract-content" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461819 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="extract-content" Dec 
03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461826 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="extract-content" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461845 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="extract-content" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461851 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="extract-content" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461876 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461882 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="extract-utilities" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461892 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461899 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: E1203 08:38:50.461913 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cc1684d-023d-46c3-8f87-3e91941a34e9" containerName="tempest-tests-tempest-tests-runner" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.461919 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cc1684d-023d-46c3-8f87-3e91941a34e9" containerName="tempest-tests-tempest-tests-runner" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.462134 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cc1684d-023d-46c3-8f87-3e91941a34e9" containerName="tempest-tests-tempest-tests-runner" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.462153 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="31250c24-766e-4264-865c-4081c7a487dc" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.462163 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="3291f04a-d222-4d3c-bc04-de160793563e" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.462204 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1629733a-fe08-499d-b100-d92519de0a5c" containerName="registry-server" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.462967 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.468879 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-6bczp" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.471396 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.601898 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.602041 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlz7b\" (UniqueName: \"kubernetes.io/projected/562541dd-f13c-47e5-9411-cf0d9a7c0a54-kube-api-access-wlz7b\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.703930 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlz7b\" (UniqueName: \"kubernetes.io/projected/562541dd-f13c-47e5-9411-cf0d9a7c0a54-kube-api-access-wlz7b\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.704138 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.705136 4612 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.738446 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlz7b\" (UniqueName: \"kubernetes.io/projected/562541dd-f13c-47e5-9411-cf0d9a7c0a54-kube-api-access-wlz7b\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc kubenswrapper[4612]: I1203 08:38:50.753811 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"562541dd-f13c-47e5-9411-cf0d9a7c0a54\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:50 crc 
kubenswrapper[4612]: I1203 08:38:50.784590 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 08:38:51 crc kubenswrapper[4612]: I1203 08:38:51.244814 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 08:38:51 crc kubenswrapper[4612]: I1203 08:38:51.924489 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"562541dd-f13c-47e5-9411-cf0d9a7c0a54","Type":"ContainerStarted","Data":"2c696626272586d0a6e6d7165ac280d013a4d78f8461fe5a11789efefbff6237"} Dec 03 08:38:52 crc kubenswrapper[4612]: I1203 08:38:52.937444 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"562541dd-f13c-47e5-9411-cf0d9a7c0a54","Type":"ContainerStarted","Data":"9c372752e9d8a568940282a67d7d22c6380a8329dc49122bf857e093d55c75b1"} Dec 03 08:38:52 crc kubenswrapper[4612]: I1203 08:38:52.971494 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.812435542 podStartE2EDuration="2.971465321s" podCreationTimestamp="2025-12-03 08:38:50 +0000 UTC" firstStartedPulling="2025-12-03 08:38:51.265867099 +0000 UTC m=+4294.439224499" lastFinishedPulling="2025-12-03 08:38:52.424896878 +0000 UTC m=+4295.598254278" observedRunningTime="2025-12-03 08:38:52.95643466 +0000 UTC m=+4296.129792090" watchObservedRunningTime="2025-12-03 08:38:52.971465321 +0000 UTC m=+4296.144822741" Dec 03 08:39:01 crc kubenswrapper[4612]: I1203 08:39:01.090157 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:39:01 crc kubenswrapper[4612]: E1203 08:39:01.091837 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:39:14 crc kubenswrapper[4612]: I1203 08:39:14.090400 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:39:14 crc kubenswrapper[4612]: E1203 08:39:14.091493 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.714137 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-827lw/must-gather-k9brt"] Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.716863 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.725238 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-827lw"/"openshift-service-ca.crt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.725932 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-827lw"/"kube-root-ca.crt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.748207 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-827lw/must-gather-k9brt"] Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.762725 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.762863 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s7fl\" (UniqueName: \"kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.865009 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.865123 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s7fl\" (UniqueName: \"kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.865771 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:17 crc kubenswrapper[4612]: I1203 08:39:17.892477 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s7fl\" (UniqueName: \"kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl\") pod \"must-gather-k9brt\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:18 crc kubenswrapper[4612]: I1203 08:39:18.043232 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:39:19 crc kubenswrapper[4612]: I1203 08:39:19.371812 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-827lw/must-gather-k9brt"] Dec 03 08:39:20 crc kubenswrapper[4612]: I1203 08:39:20.258118 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/must-gather-k9brt" event={"ID":"e4f1e8aa-37f4-41d8-8171-3f156d9d610d","Type":"ContainerStarted","Data":"ad72940f5d27196d451bb0defea18830c82e530a14060a1ef7801a6ae85f87d1"} Dec 03 08:39:25 crc kubenswrapper[4612]: I1203 08:39:25.094086 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:39:25 crc kubenswrapper[4612]: E1203 08:39:25.094751 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:39:25 crc kubenswrapper[4612]: I1203 08:39:25.318960 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/must-gather-k9brt" event={"ID":"e4f1e8aa-37f4-41d8-8171-3f156d9d610d","Type":"ContainerStarted","Data":"e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4"} Dec 03 08:39:25 crc kubenswrapper[4612]: I1203 08:39:25.319042 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/must-gather-k9brt" event={"ID":"e4f1e8aa-37f4-41d8-8171-3f156d9d610d","Type":"ContainerStarted","Data":"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c"} Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.381662 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-827lw/must-gather-k9brt" podStartSLOduration=8.137905698 podStartE2EDuration="13.381639057s" podCreationTimestamp="2025-12-03 08:39:17 +0000 UTC" firstStartedPulling="2025-12-03 08:39:19.381052969 +0000 UTC m=+4322.554410369" lastFinishedPulling="2025-12-03 08:39:24.624786328 +0000 UTC m=+4327.798143728" observedRunningTime="2025-12-03 08:39:25.337096927 +0000 UTC m=+4328.510454337" watchObservedRunningTime="2025-12-03 08:39:30.381639057 +0000 UTC m=+4333.554996467" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.388589 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-827lw/crc-debug-pz29b"] Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.390123 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.392076 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-827lw"/"default-dockercfg-hs272" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.448674 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz5pj\" (UniqueName: \"kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.448907 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.550923 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz5pj\" (UniqueName: \"kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.551193 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.551602 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.573990 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz5pj\" (UniqueName: \"kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj\") pod \"crc-debug-pz29b\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:30 crc kubenswrapper[4612]: I1203 08:39:30.710572 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:39:31 crc kubenswrapper[4612]: I1203 08:39:31.379482 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-pz29b" event={"ID":"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e","Type":"ContainerStarted","Data":"6c2dc077d243ba232961cc7f18846e4ac14e1725f0b77c13c4f2ecb13df4b39a"} Dec 03 08:39:38 crc kubenswrapper[4612]: I1203 08:39:38.089317 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:39:38 crc kubenswrapper[4612]: E1203 08:39:38.090143 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:39:45 crc kubenswrapper[4612]: I1203 08:39:45.521619 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-pz29b" event={"ID":"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e","Type":"ContainerStarted","Data":"1597027375f29b17f46f9ff7d9aab68fb39e7fa0cb20a1fadd8136865b0df594"} Dec 03 08:39:45 crc kubenswrapper[4612]: I1203 08:39:45.552707 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-827lw/crc-debug-pz29b" podStartSLOduration=1.716228468 podStartE2EDuration="15.552689426s" podCreationTimestamp="2025-12-03 08:39:30 +0000 UTC" firstStartedPulling="2025-12-03 08:39:30.751505501 +0000 UTC m=+4333.924862901" lastFinishedPulling="2025-12-03 08:39:44.587966459 +0000 UTC m=+4347.761323859" observedRunningTime="2025-12-03 08:39:45.548433974 +0000 UTC m=+4348.721791374" watchObservedRunningTime="2025-12-03 08:39:45.552689426 +0000 UTC m=+4348.726046836" Dec 03 08:39:51 crc kubenswrapper[4612]: I1203 08:39:51.089515 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:39:51 crc kubenswrapper[4612]: E1203 08:39:51.090430 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:40:04 crc kubenswrapper[4612]: I1203 08:40:04.090442 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:40:04 crc kubenswrapper[4612]: E1203 08:40:04.091044 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:40:19 crc kubenswrapper[4612]: I1203 08:40:19.090165 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 
08:40:19 crc kubenswrapper[4612]: E1203 08:40:19.091210 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:40:31 crc kubenswrapper[4612]: I1203 08:40:31.090276 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:40:31 crc kubenswrapper[4612]: E1203 08:40:31.096713 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:40:39 crc kubenswrapper[4612]: I1203 08:40:39.860265 4612 generic.go:334] "Generic (PLEG): container finished" podID="8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" containerID="1597027375f29b17f46f9ff7d9aab68fb39e7fa0cb20a1fadd8136865b0df594" exitCode=0 Dec 03 08:40:39 crc kubenswrapper[4612]: I1203 08:40:39.860665 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-pz29b" event={"ID":"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e","Type":"ContainerDied","Data":"1597027375f29b17f46f9ff7d9aab68fb39e7fa0cb20a1fadd8136865b0df594"} Dec 03 08:40:40 crc kubenswrapper[4612]: I1203 08:40:40.987261 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.033344 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-827lw/crc-debug-pz29b"] Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.045146 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-827lw/crc-debug-pz29b"] Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.123178 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host\") pod \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.123291 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host" (OuterVolumeSpecName: "host") pod "8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" (UID: "8ec7bbf4-9ce5-44b3-a43b-61eba010d01e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.123965 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz5pj\" (UniqueName: \"kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj\") pod \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\" (UID: \"8ec7bbf4-9ce5-44b3-a43b-61eba010d01e\") " Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.124469 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-host\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.138098 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj" (OuterVolumeSpecName: "kube-api-access-tz5pj") pod "8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" (UID: "8ec7bbf4-9ce5-44b3-a43b-61eba010d01e"). InnerVolumeSpecName "kube-api-access-tz5pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.226261 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz5pj\" (UniqueName: \"kubernetes.io/projected/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e-kube-api-access-tz5pj\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.881210 4612 scope.go:117] "RemoveContainer" containerID="1597027375f29b17f46f9ff7d9aab68fb39e7fa0cb20a1fadd8136865b0df594" Dec 03 08:40:41 crc kubenswrapper[4612]: I1203 08:40:41.881347 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-pz29b" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.218065 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-827lw/crc-debug-7h6jn"] Dec 03 08:40:42 crc kubenswrapper[4612]: E1203 08:40:42.218435 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" containerName="container-00" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.218447 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" containerName="container-00" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.218633 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" containerName="container-00" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.219229 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.221131 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-827lw"/"default-dockercfg-hs272" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.346356 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.346851 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gnx9\" (UniqueName: \"kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.448915 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.449108 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.449309 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gnx9\" (UniqueName: \"kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.467416 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gnx9\" (UniqueName: \"kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9\") pod \"crc-debug-7h6jn\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.536516 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.892311 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-7h6jn" event={"ID":"e75898be-0b45-4505-8ab1-e02a40c4fa8e","Type":"ContainerStarted","Data":"526778d627a19599fc5c6b4b62c1c9ba59d495985f7a170a611c654e8855312b"} Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.892571 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-7h6jn" event={"ID":"e75898be-0b45-4505-8ab1-e02a40c4fa8e","Type":"ContainerStarted","Data":"ac9bbbdc85979e0eaff6e7fb6673e506ee99074194e915f2569d6bd7eb77622c"} Dec 03 08:40:42 crc kubenswrapper[4612]: I1203 08:40:42.906581 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-827lw/crc-debug-7h6jn" podStartSLOduration=0.906557178 podStartE2EDuration="906.557178ms" podCreationTimestamp="2025-12-03 08:40:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:40:42.905678037 +0000 UTC m=+4406.079035437" watchObservedRunningTime="2025-12-03 08:40:42.906557178 +0000 UTC m=+4406.079914578" Dec 03 08:40:43 crc kubenswrapper[4612]: I1203 08:40:43.104989 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ec7bbf4-9ce5-44b3-a43b-61eba010d01e" path="/var/lib/kubelet/pods/8ec7bbf4-9ce5-44b3-a43b-61eba010d01e/volumes" Dec 03 08:40:43 crc kubenswrapper[4612]: I1203 08:40:43.902157 4612 generic.go:334] "Generic (PLEG): container finished" podID="e75898be-0b45-4505-8ab1-e02a40c4fa8e" containerID="526778d627a19599fc5c6b4b62c1c9ba59d495985f7a170a611c654e8855312b" exitCode=0 Dec 03 08:40:43 crc kubenswrapper[4612]: I1203 08:40:43.902206 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-7h6jn" event={"ID":"e75898be-0b45-4505-8ab1-e02a40c4fa8e","Type":"ContainerDied","Data":"526778d627a19599fc5c6b4b62c1c9ba59d495985f7a170a611c654e8855312b"} Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.031212 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.112976 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-827lw/crc-debug-7h6jn"] Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.122040 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-827lw/crc-debug-7h6jn"] Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.194784 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gnx9\" (UniqueName: \"kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9\") pod \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.194830 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host\") pod \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\" (UID: \"e75898be-0b45-4505-8ab1-e02a40c4fa8e\") " Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.194997 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host" (OuterVolumeSpecName: "host") pod "e75898be-0b45-4505-8ab1-e02a40c4fa8e" (UID: "e75898be-0b45-4505-8ab1-e02a40c4fa8e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.196917 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e75898be-0b45-4505-8ab1-e02a40c4fa8e-host\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.212200 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9" (OuterVolumeSpecName: "kube-api-access-6gnx9") pod "e75898be-0b45-4505-8ab1-e02a40c4fa8e" (UID: "e75898be-0b45-4505-8ab1-e02a40c4fa8e"). InnerVolumeSpecName "kube-api-access-6gnx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.298411 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gnx9\" (UniqueName: \"kubernetes.io/projected/e75898be-0b45-4505-8ab1-e02a40c4fa8e-kube-api-access-6gnx9\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.921613 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac9bbbdc85979e0eaff6e7fb6673e506ee99074194e915f2569d6bd7eb77622c" Dec 03 08:40:45 crc kubenswrapper[4612]: I1203 08:40:45.921717 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-7h6jn" Dec 03 08:40:46 crc kubenswrapper[4612]: E1203 08:40:46.073012 4612 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode75898be_0b45_4505_8ab1_e02a40c4fa8e.slice/crio-ac9bbbdc85979e0eaff6e7fb6673e506ee99074194e915f2569d6bd7eb77622c\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode75898be_0b45_4505_8ab1_e02a40c4fa8e.slice\": RecentStats: unable to find data in memory cache]" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.089850 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:40:46 crc kubenswrapper[4612]: E1203 08:40:46.090139 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.351592 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-827lw/crc-debug-2b7jl"] Dec 03 08:40:46 crc kubenswrapper[4612]: E1203 08:40:46.352843 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75898be-0b45-4505-8ab1-e02a40c4fa8e" containerName="container-00" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.352926 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75898be-0b45-4505-8ab1-e02a40c4fa8e" containerName="container-00" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.353242 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e75898be-0b45-4505-8ab1-e02a40c4fa8e" containerName="container-00" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.354692 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.357177 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-827lw"/"default-dockercfg-hs272" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.521725 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmbhh\" (UniqueName: \"kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.521789 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.625435 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmbhh\" (UniqueName: \"kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.625515 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.625710 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.653081 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmbhh\" (UniqueName: \"kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh\") pod \"crc-debug-2b7jl\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.679305 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:46 crc kubenswrapper[4612]: I1203 08:40:46.932049 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-2b7jl" event={"ID":"98d0d710-2713-425b-8d79-b2f8da9f4c4b","Type":"ContainerStarted","Data":"676035aaf2eafb17297f7c9548a4b6b60f6a9b0bf96cae1523bf5575a0388a09"} Dec 03 08:40:47 crc kubenswrapper[4612]: I1203 08:40:47.103434 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e75898be-0b45-4505-8ab1-e02a40c4fa8e" path="/var/lib/kubelet/pods/e75898be-0b45-4505-8ab1-e02a40c4fa8e/volumes" Dec 03 08:40:47 crc kubenswrapper[4612]: I1203 08:40:47.942249 4612 generic.go:334] "Generic (PLEG): container finished" podID="98d0d710-2713-425b-8d79-b2f8da9f4c4b" containerID="d79d1ec2d60091671f7de4ae599dacd746a2d47b996cf97e347c843c8df108e3" exitCode=0 Dec 03 08:40:47 crc kubenswrapper[4612]: I1203 08:40:47.942395 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/crc-debug-2b7jl" event={"ID":"98d0d710-2713-425b-8d79-b2f8da9f4c4b","Type":"ContainerDied","Data":"d79d1ec2d60091671f7de4ae599dacd746a2d47b996cf97e347c843c8df108e3"} Dec 03 08:40:47 crc kubenswrapper[4612]: I1203 08:40:47.986654 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-827lw/crc-debug-2b7jl"] Dec 03 08:40:48 crc kubenswrapper[4612]: I1203 08:40:48.002256 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-827lw/crc-debug-2b7jl"] Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.060542 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.170623 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host\") pod \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.170755 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmbhh\" (UniqueName: \"kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh\") pod \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\" (UID: \"98d0d710-2713-425b-8d79-b2f8da9f4c4b\") " Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.170767 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host" (OuterVolumeSpecName: "host") pod "98d0d710-2713-425b-8d79-b2f8da9f4c4b" (UID: "98d0d710-2713-425b-8d79-b2f8da9f4c4b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.171554 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98d0d710-2713-425b-8d79-b2f8da9f4c4b-host\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.178281 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh" (OuterVolumeSpecName: "kube-api-access-xmbhh") pod "98d0d710-2713-425b-8d79-b2f8da9f4c4b" (UID: "98d0d710-2713-425b-8d79-b2f8da9f4c4b"). InnerVolumeSpecName "kube-api-access-xmbhh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.273243 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmbhh\" (UniqueName: \"kubernetes.io/projected/98d0d710-2713-425b-8d79-b2f8da9f4c4b-kube-api-access-xmbhh\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.961125 4612 scope.go:117] "RemoveContainer" containerID="d79d1ec2d60091671f7de4ae599dacd746a2d47b996cf97e347c843c8df108e3" Dec 03 08:40:49 crc kubenswrapper[4612]: I1203 08:40:49.961151 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/crc-debug-2b7jl" Dec 03 08:40:51 crc kubenswrapper[4612]: I1203 08:40:51.104843 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98d0d710-2713-425b-8d79-b2f8da9f4c4b" path="/var/lib/kubelet/pods/98d0d710-2713-425b-8d79-b2f8da9f4c4b/volumes" Dec 03 08:41:01 crc kubenswrapper[4612]: I1203 08:41:01.090983 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:41:01 crc kubenswrapper[4612]: E1203 08:41:01.091916 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:41:09 crc kubenswrapper[4612]: I1203 08:41:09.551338 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5687f788c4-jknl6_ad775971-28a0-4fd6-8e8b-e10e5a9c0c50/barbican-api/0.log" Dec 03 08:41:09 crc kubenswrapper[4612]: I1203 08:41:09.742438 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5687f788c4-jknl6_ad775971-28a0-4fd6-8e8b-e10e5a9c0c50/barbican-api-log/0.log" Dec 03 08:41:09 crc kubenswrapper[4612]: I1203 08:41:09.797904 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f87f554fd-p5qhz_4d772b9f-b6df-4ca8-8a76-f28285eef6b9/barbican-keystone-listener/0.log" Dec 03 08:41:09 crc kubenswrapper[4612]: I1203 08:41:09.921300 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f87f554fd-p5qhz_4d772b9f-b6df-4ca8-8a76-f28285eef6b9/barbican-keystone-listener-log/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.065181 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf5d979-4pmzw_9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16/barbican-worker/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.167108 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf5d979-4pmzw_9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16/barbican-worker-log/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.375224 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8_8d491f1b-5bbf-4508-8ddc-2e986613d792/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.452587 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/ceilometer-central-agent/0.log" 
Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.569236 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/ceilometer-notification-agent/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.711901 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/sg-core/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.712801 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/proxy-httpd/0.log" Dec 03 08:41:10 crc kubenswrapper[4612]: I1203 08:41:10.910182 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_751ad1b4-cd3c-4616-99ed-9b19fee06ae8/cinder-api/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.000465 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_751ad1b4-cd3c-4616-99ed-9b19fee06ae8/cinder-api-log/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.130283 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_df089922-13b4-43d5-beaf-8dff66c6e7cb/cinder-scheduler/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.187198 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_df089922-13b4-43d5-beaf-8dff66c6e7cb/probe/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.405651 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-m45rk_27f1ab42-07b8-4697-ae0b-0afc5cb72e06/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.489572 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-l892k_f20be1ec-5ef4-4559-8a86-e857886c0856/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.676036 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/init/0.log" Dec 03 08:41:11 crc kubenswrapper[4612]: I1203 08:41:11.931164 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/init/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.155890 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/dnsmasq-dns/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.225508 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv_8d6c8be7-a683-465a-9894-08b5ca61791e/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.373974 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_33add78e-3ec1-42dd-90aa-9df9f53028b3/glance-log/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.449745 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_33add78e-3ec1-42dd-90aa-9df9f53028b3/glance-httpd/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.625460 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_f68a55b9-312b-42b5-a0ac-ffe92a4e81b8/glance-log/0.log" Dec 03 08:41:12 crc kubenswrapper[4612]: I1203 08:41:12.643506 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f68a55b9-312b-42b5-a0ac-ffe92a4e81b8/glance-httpd/0.log" Dec 03 08:41:13 crc kubenswrapper[4612]: I1203 08:41:13.409812 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon/1.log" Dec 03 08:41:13 crc kubenswrapper[4612]: I1203 08:41:13.690085 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon/0.log" Dec 03 08:41:13 crc kubenswrapper[4612]: I1203 08:41:13.882663 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj_237ab75b-84d9-4bd7-b235-2073221081f2/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:13 crc kubenswrapper[4612]: I1203 08:41:13.966512 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon-log/0.log" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.077596 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-g56jm_eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.090920 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:41:14 crc kubenswrapper[4612]: E1203 08:41:14.091267 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.429775 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412481-m7pjg_9594636b-c035-4a21-94d7-b54b4e73ef55/keystone-cron/0.log" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.501639 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_67686a82-cd3d-4b98-ab0f-b2e37c74a12f/kube-state-metrics/0.log" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.685664 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-664f766786-5w95f_0b3c004c-b094-4f83-b5c0-35fd59313980/keystone-api/0.log" Dec 03 08:41:14 crc kubenswrapper[4612]: I1203 08:41:14.800677 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv_7f0ab99d-079d-4eda-8308-33f1a44d5c3b/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:15 crc kubenswrapper[4612]: I1203 08:41:15.576117 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s_fe4761e2-fbfe-473c-bc56-fafd2d11559b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:15 crc kubenswrapper[4612]: I1203 08:41:15.820912 4612 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_neutron-6f7b8b49c9-7g7fc_d86bd30e-374a-4a76-be08-89a4e3310b61/neutron-httpd/0.log" Dec 03 08:41:15 crc kubenswrapper[4612]: I1203 08:41:15.902874 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b894480f-fa85-4215-8599-23743aa1c262/memcached/0.log" Dec 03 08:41:16 crc kubenswrapper[4612]: I1203 08:41:16.055934 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f7b8b49c9-7g7fc_d86bd30e-374a-4a76-be08-89a4e3310b61/neutron-api/0.log" Dec 03 08:41:16 crc kubenswrapper[4612]: I1203 08:41:16.556144 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_e95122bf-4ece-43ec-9974-c8388713d7d6/nova-cell0-conductor-conductor/0.log" Dec 03 08:41:16 crc kubenswrapper[4612]: I1203 08:41:16.708338 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_cee4bc96-2455-41b3-a3cc-d743fab6e761/nova-cell1-conductor-conductor/0.log" Dec 03 08:41:17 crc kubenswrapper[4612]: I1203 08:41:17.101770 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_513ba267-34e9-4883-9eb1-55f5441813db/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 08:41:17 crc kubenswrapper[4612]: I1203 08:41:17.152023 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-sc9jx_f9b6eb07-a99d-4365-b819-81f008e2018d/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:17 crc kubenswrapper[4612]: I1203 08:41:17.395692 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bfb6ea50-d549-413b-b2b0-70f5d0e38954/nova-api-log/0.log" Dec 03 08:41:17 crc kubenswrapper[4612]: I1203 08:41:17.530451 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_28910ce9-c2c5-484e-bacd-9170253c2e83/nova-metadata-log/0.log" Dec 03 08:41:17 crc kubenswrapper[4612]: I1203 08:41:17.716459 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bfb6ea50-d549-413b-b2b0-70f5d0e38954/nova-api-api/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.306700 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/mysql-bootstrap/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.403712 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cf66f76e-bca2-466f-b672-bfe680810c3e/nova-scheduler-scheduler/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.514218 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/mysql-bootstrap/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.593315 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/galera/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.761465 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/mysql-bootstrap/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.899914 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_28910ce9-c2c5-484e-bacd-9170253c2e83/nova-metadata-metadata/0.log" Dec 03 08:41:18 crc kubenswrapper[4612]: I1203 08:41:18.986427 4612 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/mysql-bootstrap/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.013078 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_6b63827f-76e8-454f-9243-6c05f9e3c2fd/openstackclient/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.146161 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/galera/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.292430 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-j7748_385edacb-e835-42f4-a521-7c321043b989/ovn-controller/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.427641 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zc5qp_07101baf-17d1-4e64-8c8a-4ee57ab33873/openstack-network-exporter/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.491640 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server-init/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.694823 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovs-vswitchd/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.719337 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.748335 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server-init/0.log" Dec 03 08:41:19 crc kubenswrapper[4612]: I1203 08:41:19.797173 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-6ztrk_44b9b74b-2985-47c3-aec4-304bfc2d6122/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.044670 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d38a92e9-cf02-4966-9bb1-4ea642490d00/openstack-network-exporter/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.045652 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d38a92e9-cf02-4966-9bb1-4ea642490d00/ovn-northd/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.178608 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9463ced8-f487-4a16-9af3-07b736ca556c/openstack-network-exporter/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.293609 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9463ced8-f487-4a16-9af3-07b736ca556c/ovsdbserver-nb/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.312040 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_76717115-6292-47aa-bc1a-90c5e618967b/openstack-network-exporter/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.350996 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_76717115-6292-47aa-bc1a-90c5e618967b/ovsdbserver-sb/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.635749 4612 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/setup-container/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.703313 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cd4567696-hsfd9_1f9cee01-af98-4b34-b263-ae543c237e0b/placement-api/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.817723 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cd4567696-hsfd9_1f9cee01-af98-4b34-b263-ae543c237e0b/placement-log/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.984456 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/setup-container/0.log" Dec 03 08:41:20 crc kubenswrapper[4612]: I1203 08:41:20.997759 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/rabbitmq/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.133109 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/setup-container/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.384755 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/setup-container/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.428861 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf_4ad5399d-3f69-4f51-bc8c-9245e721bcfd/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.437730 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/rabbitmq/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.646719 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-qvcck_41c7b385-5686-4d88-a86a-072eb493e1a2/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.672388 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr_18af4e51-62fc-4bba-8afd-c8b743e70852/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.747589 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-vwxdw_18e6e5c3-b90b-429e-9b89-c94e3f20ecdd/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:21 crc kubenswrapper[4612]: I1203 08:41:21.986071 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bkpsr_7a053178-34a7-49d7-8119-09c53336a553/ssh-known-hosts-edpm-deployment/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.010082 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7b57f75fd5-642lv_2528552f-220d-4b33-990a-7793d5d8987a/proxy-server/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.157672 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7b57f75fd5-642lv_2528552f-220d-4b33-990a-7793d5d8987a/proxy-httpd/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.276774 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-ring-rebalance-p474m_0aff4c7e-d189-4658-b1a6-388353c8dfa8/swift-ring-rebalance/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.348843 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-auditor/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.433332 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-reaper/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.663395 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-replicator/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.687983 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-auditor/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.768271 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-replicator/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.831390 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-server/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.894875 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-server/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.923559 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-updater/0.log" Dec 03 08:41:22 crc kubenswrapper[4612]: I1203 08:41:22.972456 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-auditor/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.077188 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-expirer/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.102543 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-replicator/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.255791 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-server/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.295099 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-updater/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.319526 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/rsync/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.390194 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/swift-recon-cron/0.log" Dec 03 08:41:23 crc kubenswrapper[4612]: I1203 08:41:23.611096 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-4gccm_063f10ac-9f99-4bae-9eae-ec9d2ebb773f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:24 crc kubenswrapper[4612]: I1203 08:41:24.229448 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_562541dd-f13c-47e5-9411-cf0d9a7c0a54/test-operator-logs-container/0.log" Dec 03 08:41:24 crc kubenswrapper[4612]: I1203 08:41:24.249326 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5cc1684d-023d-46c3-8f87-3e91941a34e9/tempest-tests-tempest-tests-runner/0.log" Dec 03 08:41:24 crc kubenswrapper[4612]: I1203 08:41:24.384784 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-p95mm_7624e359-51e1-46df-829a-12aebc8d3688/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:41:27 crc kubenswrapper[4612]: I1203 08:41:27.096573 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:41:27 crc kubenswrapper[4612]: E1203 08:41:27.105636 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:41:38 crc kubenswrapper[4612]: I1203 08:41:38.089494 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:41:38 crc kubenswrapper[4612]: E1203 08:41:38.090224 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:41:51 crc kubenswrapper[4612]: I1203 08:41:51.090028 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:41:51 crc kubenswrapper[4612]: E1203 08:41:51.090786 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:41:54 crc kubenswrapper[4612]: I1203 08:41:54.518885 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:41:54 crc kubenswrapper[4612]: I1203 08:41:54.727189 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:41:54 crc kubenswrapper[4612]: I1203 
08:41:54.778696 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.021444 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.224590 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.269968 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.311876 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/extract/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.516051 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-n2rzg_3d3cecf1-2f48-4b22-9350-870d25e786ef/kube-rbac-proxy/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.576462 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-n2rzg_3d3cecf1-2f48-4b22-9350-870d25e786ef/manager/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.667698 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2pxgx_3c3eb81e-314d-486d-afa4-443f33c54510/kube-rbac-proxy/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.804526 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2pxgx_3c3eb81e-314d-486d-afa4-443f33c54510/manager/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.911503 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4cjnp_9f7c1634-c25d-4fc1-92bd-d95ef05c7868/manager/0.log" Dec 03 08:41:55 crc kubenswrapper[4612]: I1203 08:41:55.927591 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4cjnp_9f7c1634-c25d-4fc1-92bd-d95ef05c7868/kube-rbac-proxy/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.082207 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tklpk_d75ef15b-d718-436d-b570-21416a0c4021/kube-rbac-proxy/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.237671 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tklpk_d75ef15b-d718-436d-b570-21416a0c4021/manager/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.282172 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-5rg69_b8715491-d469-4ade-8434-765685a955db/kube-rbac-proxy/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.348978 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-5rg69_b8715491-d469-4ade-8434-765685a955db/manager/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.472671 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bs99s_c2db30ef-0db3-44d4-b276-3b81195d4962/kube-rbac-proxy/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.717631 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bs99s_c2db30ef-0db3-44d4-b276-3b81195d4962/manager/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.790534 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-rdf2f_d70b0d51-8225-4d2b-b128-aeda29446ab9/kube-rbac-proxy/0.log" Dec 03 08:41:56 crc kubenswrapper[4612]: I1203 08:41:56.948927 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-rdf2f_d70b0d51-8225-4d2b-b128-aeda29446ab9/manager/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.013657 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-6mbrd_39305f1e-8b3f-43aa-97d4-48410cc7fe91/kube-rbac-proxy/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.066807 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-6mbrd_39305f1e-8b3f-43aa-97d4-48410cc7fe91/manager/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.164790 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-flkb9_58adadbb-3706-4f8c-be33-31836f4860e5/kube-rbac-proxy/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.410368 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-d98bb_790a3a61-40c9-4360-b7b6-9f08edbec437/kube-rbac-proxy/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.413524 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-flkb9_58adadbb-3706-4f8c-be33-31836f4860e5/manager/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.470625 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-d98bb_790a3a61-40c9-4360-b7b6-9f08edbec437/manager/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.705656 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-m898g_e4cadc4e-ebfd-4886-83a2-1caf4aef2b68/manager/0.log" Dec 03 08:41:57 crc kubenswrapper[4612]: I1203 08:41:57.713166 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-m898g_e4cadc4e-ebfd-4886-83a2-1caf4aef2b68/kube-rbac-proxy/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.037062 4612 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-r75nm_d9951bd4-0756-4c79-96b0-ceaac8a1e51a/manager/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.047255 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-r75nm_d9951bd4-0756-4c79-96b0-ceaac8a1e51a/kube-rbac-proxy/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.519018 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-4vp27_24a39a3f-a75f-4029-b861-cf683db5aae2/kube-rbac-proxy/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.610437 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-4vp27_24a39a3f-a75f-4029-b861-cf683db5aae2/manager/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.646672 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-b2s4p_ec8611ec-2e0f-4906-af03-7dc350e7e783/kube-rbac-proxy/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.728823 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-b2s4p_ec8611ec-2e0f-4906-af03-7dc350e7e783/manager/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.842731 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9_a9a1ffeb-b3b4-4b07-911b-b829962b6827/kube-rbac-proxy/0.log" Dec 03 08:41:58 crc kubenswrapper[4612]: I1203 08:41:58.883321 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9_a9a1ffeb-b3b4-4b07-911b-b829962b6827/manager/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.325454 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-vf48w_5c654dcd-f507-4d31-b3d7-7230cc7cb086/registry-server/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.331471 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8f4757f6b-g7ncc_097f95f4-4fc4-43c0-aefd-da8b3c0111f3/operator/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.507466 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jbfkr_99188e23-da4a-4d43-8778-a2a0b9e962dc/kube-rbac-proxy/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.838448 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jbfkr_99188e23-da4a-4d43-8778-a2a0b9e962dc/manager/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.861159 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-w62v7_1ac4eade-01c8-4323-8796-6b2d39a7ee36/manager/0.log" Dec 03 08:41:59 crc kubenswrapper[4612]: I1203 08:41:59.904436 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-w62v7_1ac4eade-01c8-4323-8796-6b2d39a7ee36/kube-rbac-proxy/0.log" Dec 03 08:42:00 crc kubenswrapper[4612]: I1203 08:42:00.147893 4612 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-6fzpq_8fd04295-8c24-459f-b2d5-1fee88165e78/operator/0.log" Dec 03 08:42:00 crc kubenswrapper[4612]: I1203 08:42:00.154214 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-64f7f94cfd-bdkfw_f6260167-bcb0-4f9e-8a44-6cd47d248296/manager/0.log" Dec 03 08:42:00 crc kubenswrapper[4612]: I1203 08:42:00.924914 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-kv6n4_5a8351bf-c4cf-40fc-8df9-22b3064770a3/manager/0.log" Dec 03 08:42:00 crc kubenswrapper[4612]: I1203 08:42:00.971159 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-jbbw2_2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2/kube-rbac-proxy/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.011594 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-kv6n4_5a8351bf-c4cf-40fc-8df9-22b3064770a3/kube-rbac-proxy/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.070736 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-jbbw2_2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2/manager/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.246438 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-snwlr_b52c7da9-b392-448b-a04a-1afa333df442/manager/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.286838 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-snwlr_b52c7da9-b392-448b-a04a-1afa333df442/kube-rbac-proxy/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.314127 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-495qs_797a523a-540b-4b10-a294-0543192f0c56/kube-rbac-proxy/0.log" Dec 03 08:42:01 crc kubenswrapper[4612]: I1203 08:42:01.456887 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-495qs_797a523a-540b-4b10-a294-0543192f0c56/manager/0.log" Dec 03 08:42:03 crc kubenswrapper[4612]: I1203 08:42:03.093750 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:42:03 crc kubenswrapper[4612]: E1203 08:42:03.094102 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:42:17 crc kubenswrapper[4612]: I1203 08:42:17.089214 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:42:17 crc kubenswrapper[4612]: E1203 08:42:17.090090 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:42:25 crc kubenswrapper[4612]: I1203 08:42:25.948800 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dzht7_db28a622-4b74-49e9-bd91-6f2a253583fe/control-plane-machine-set-operator/0.log" Dec 03 08:42:26 crc kubenswrapper[4612]: I1203 08:42:26.185521 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9vwq6_749db599-e347-4a7b-9ff8-9c33514ee64a/kube-rbac-proxy/0.log" Dec 03 08:42:26 crc kubenswrapper[4612]: I1203 08:42:26.303827 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9vwq6_749db599-e347-4a7b-9ff8-9c33514ee64a/machine-api-operator/0.log" Dec 03 08:42:31 crc kubenswrapper[4612]: I1203 08:42:31.089708 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:42:31 crc kubenswrapper[4612]: E1203 08:42:31.090670 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:42:42 crc kubenswrapper[4612]: I1203 08:42:42.585434 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4kwk4_a8b94ba1-6d11-4835-9ee9-e1756681dc3e/cert-manager-controller/0.log" Dec 03 08:42:42 crc kubenswrapper[4612]: I1203 08:42:42.840453 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6gdmj_f221cbcd-d224-483f-b688-6d877302a502/cert-manager-cainjector/0.log" Dec 03 08:42:42 crc kubenswrapper[4612]: I1203 08:42:42.968817 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-28nxc_ac4674d9-ae54-48a4-858b-75a91546ddd9/cert-manager-webhook/0.log" Dec 03 08:42:46 crc kubenswrapper[4612]: I1203 08:42:46.090300 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:42:46 crc kubenswrapper[4612]: E1203 08:42:46.091171 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:42:57 crc kubenswrapper[4612]: I1203 08:42:57.095908 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:42:57 crc kubenswrapper[4612]: E1203 08:42:57.096886 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:42:57 crc kubenswrapper[4612]: I1203 08:42:57.873995 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-qgl65_bd325177-e3fc-476e-b59f-363f1bc2fe0a/nmstate-console-plugin/0.log" Dec 03 08:42:58 crc kubenswrapper[4612]: I1203 08:42:58.071549 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4mtzb_3dfc52ec-c1e6-4162-bccf-6fb5a855212f/nmstate-handler/0.log" Dec 03 08:42:58 crc kubenswrapper[4612]: I1203 08:42:58.201670 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-7dp5q_0eea55b7-ecfe-4fc9-bff2-061da172743a/kube-rbac-proxy/0.log" Dec 03 08:42:58 crc kubenswrapper[4612]: I1203 08:42:58.275604 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-7dp5q_0eea55b7-ecfe-4fc9-bff2-061da172743a/nmstate-metrics/0.log" Dec 03 08:42:58 crc kubenswrapper[4612]: I1203 08:42:58.432289 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-vlsmd_b1d96da3-e5c3-46d4-b29b-6121d6e4d112/nmstate-operator/0.log" Dec 03 08:42:58 crc kubenswrapper[4612]: I1203 08:42:58.563825 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-hnhsj_a69b0d1e-d485-4424-a5a0-607e997bbaf6/nmstate-webhook/0.log" Dec 03 08:43:09 crc kubenswrapper[4612]: I1203 08:43:09.089878 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:43:09 crc kubenswrapper[4612]: E1203 08:43:09.090796 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:43:14 crc kubenswrapper[4612]: I1203 08:43:14.919393 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-gnqmc_a982bc05-54be-4a0b-8e04-1e566601060d/kube-rbac-proxy/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.041632 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-gnqmc_a982bc05-54be-4a0b-8e04-1e566601060d/controller/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.323309 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.776501 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.806003 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.832228 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:43:15 crc kubenswrapper[4612]: I1203 08:43:15.855675 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.088337 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.141394 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.174822 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.199189 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.501537 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.514086 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.541724 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.554265 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/controller/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.796381 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/frr-metrics/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.861640 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/kube-rbac-proxy/0.log" Dec 03 08:43:16 crc kubenswrapper[4612]: I1203 08:43:16.903934 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/kube-rbac-proxy-frr/0.log" Dec 03 08:43:17 crc kubenswrapper[4612]: I1203 08:43:17.185687 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/reloader/0.log" Dec 03 08:43:17 crc kubenswrapper[4612]: I1203 08:43:17.209306 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-5r284_fd2ac083-0876-4383-ba05-5493cd25e480/frr-k8s-webhook-server/0.log" Dec 03 08:43:17 crc kubenswrapper[4612]: I1203 08:43:17.709846 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67fc746f5d-s9gvn_bd467808-b93d-47ee-bdf3-8e6f29bf3506/manager/0.log" Dec 03 08:43:17 crc kubenswrapper[4612]: I1203 08:43:17.974273 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6b85868c59-dznbc_ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d/webhook-server/0.log" Dec 03 08:43:17 crc kubenswrapper[4612]: I1203 08:43:17.986618 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/frr/0.log" Dec 03 08:43:18 crc kubenswrapper[4612]: I1203 08:43:18.105716 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wm95_410e9076-5ebb-45a7-880a-77bffe01911b/kube-rbac-proxy/0.log" Dec 03 08:43:18 crc kubenswrapper[4612]: I1203 08:43:18.495527 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wm95_410e9076-5ebb-45a7-880a-77bffe01911b/speaker/0.log" Dec 03 08:43:21 crc kubenswrapper[4612]: I1203 08:43:21.090312 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:43:21 crc kubenswrapper[4612]: E1203 08:43:21.091395 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:43:33 crc kubenswrapper[4612]: I1203 08:43:33.953497 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.089988 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:43:34 crc kubenswrapper[4612]: E1203 08:43:34.090173 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.124835 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.178408 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.206861 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.350864 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.449158 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.458481 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/extract/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.565685 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:43:34 crc kubenswrapper[4612]: I1203 08:43:34.736040 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.289173 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.301845 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.466731 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.489293 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/extract/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.511315 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.682211 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.843656 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.864303 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:43:35 crc kubenswrapper[4612]: I1203 08:43:35.888334 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.019010 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.036924 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.299135 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.467523 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/registry-server/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.897227 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.918496 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:43:36 crc kubenswrapper[4612]: I1203 08:43:36.934570 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.118915 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.145957 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.413756 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-x2kxn_4db345c6-2ee2-4acf-9be4-a705bddb07fe/marketplace-operator/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.554252 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.747865 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.831792 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.853079 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/registry-server/0.log" Dec 03 08:43:37 crc kubenswrapper[4612]: I1203 08:43:37.858979 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.052533 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.065101 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.127537 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.283061 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/registry-server/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.335425 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.380260 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.401319 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.519625 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:43:38 crc kubenswrapper[4612]: I1203 08:43:38.585192 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:43:39 crc kubenswrapper[4612]: I1203 08:43:39.005803 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/registry-server/0.log" Dec 03 08:43:45 crc kubenswrapper[4612]: I1203 08:43:45.089664 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:43:45 crc kubenswrapper[4612]: E1203 08:43:45.090642 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:43:58 crc kubenswrapper[4612]: I1203 08:43:58.089445 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:43:58 crc kubenswrapper[4612]: I1203 08:43:58.880869 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc"} Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.187955 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f"] Dec 03 08:45:00 crc kubenswrapper[4612]: E1203 08:45:00.189085 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d0d710-2713-425b-8d79-b2f8da9f4c4b" 
containerName="container-00" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.189101 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d0d710-2713-425b-8d79-b2f8da9f4c4b" containerName="container-00" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.189323 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d0d710-2713-425b-8d79-b2f8da9f4c4b" containerName="container-00" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.190108 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.194999 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.195029 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.200673 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f"] Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.279228 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wrx\" (UniqueName: \"kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.279324 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.279351 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.381633 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wrx\" (UniqueName: \"kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.381707 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.381731 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.382692 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.387836 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.402636 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wrx\" (UniqueName: \"kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx\") pod \"collect-profiles-29412525-8t84f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:00 crc kubenswrapper[4612]: I1203 08:45:00.515848 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:01 crc kubenswrapper[4612]: I1203 08:45:01.050663 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f"] Dec 03 08:45:01 crc kubenswrapper[4612]: I1203 08:45:01.466562 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" event={"ID":"deaa317b-dcbd-4f5a-894a-05b8e835a95f","Type":"ContainerStarted","Data":"e48ad13cc216fe6982568e5ffd94e41e36178dbd0866003652f3975c41d2bdad"} Dec 03 08:45:01 crc kubenswrapper[4612]: I1203 08:45:01.466818 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" event={"ID":"deaa317b-dcbd-4f5a-894a-05b8e835a95f","Type":"ContainerStarted","Data":"34b608a2128ddead05acf8ad7dc7969b0ddb5a3c4723f478ae50380bfa07096a"} Dec 03 08:45:01 crc kubenswrapper[4612]: I1203 08:45:01.494035 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" podStartSLOduration=1.494012347 podStartE2EDuration="1.494012347s" podCreationTimestamp="2025-12-03 08:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:01.483859061 +0000 UTC m=+4664.657216471" watchObservedRunningTime="2025-12-03 08:45:01.494012347 +0000 UTC m=+4664.667369757" Dec 03 08:45:02 crc kubenswrapper[4612]: I1203 08:45:02.477359 4612 generic.go:334] "Generic (PLEG): container finished" podID="deaa317b-dcbd-4f5a-894a-05b8e835a95f" containerID="e48ad13cc216fe6982568e5ffd94e41e36178dbd0866003652f3975c41d2bdad" exitCode=0 Dec 03 08:45:02 crc kubenswrapper[4612]: I1203 08:45:02.477480 4612 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" event={"ID":"deaa317b-dcbd-4f5a-894a-05b8e835a95f","Type":"ContainerDied","Data":"e48ad13cc216fe6982568e5ffd94e41e36178dbd0866003652f3975c41d2bdad"} Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.907640 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.921568 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.938850 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.961244 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98wrx\" (UniqueName: \"kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx\") pod \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.964470 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume\") pod \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.964664 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume\") pod \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\" (UID: \"deaa317b-dcbd-4f5a-894a-05b8e835a95f\") " Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.965530 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56m5h\" (UniqueName: \"kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.965663 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.965786 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.966597 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:03 crc kubenswrapper[4612]: I1203 08:45:03.968481 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume" 
(OuterVolumeSpecName: "config-volume") pod "deaa317b-dcbd-4f5a-894a-05b8e835a95f" (UID: "deaa317b-dcbd-4f5a-894a-05b8e835a95f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.019617 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "deaa317b-dcbd-4f5a-894a-05b8e835a95f" (UID: "deaa317b-dcbd-4f5a-894a-05b8e835a95f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.035121 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx" (OuterVolumeSpecName: "kube-api-access-98wrx") pod "deaa317b-dcbd-4f5a-894a-05b8e835a95f" (UID: "deaa317b-dcbd-4f5a-894a-05b8e835a95f"). InnerVolumeSpecName "kube-api-access-98wrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.086682 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.086901 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56m5h\" (UniqueName: \"kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.087041 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.087157 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98wrx\" (UniqueName: \"kubernetes.io/projected/deaa317b-dcbd-4f5a-894a-05b8e835a95f-kube-api-access-98wrx\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.087171 4612 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/deaa317b-dcbd-4f5a-894a-05b8e835a95f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.087181 4612 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/deaa317b-dcbd-4f5a-894a-05b8e835a95f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.088433 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.089315 4612 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.122821 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56m5h\" (UniqueName: \"kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h\") pod \"redhat-operators-845bw\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.410604 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.498408 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" event={"ID":"deaa317b-dcbd-4f5a-894a-05b8e835a95f","Type":"ContainerDied","Data":"34b608a2128ddead05acf8ad7dc7969b0ddb5a3c4723f478ae50380bfa07096a"} Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.498459 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34b608a2128ddead05acf8ad7dc7969b0ddb5a3c4723f478ae50380bfa07096a" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.498471 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8t84f" Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.628603 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z"] Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.640801 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412480-x4p2z"] Dec 03 08:45:04 crc kubenswrapper[4612]: I1203 08:45:04.991720 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:05 crc kubenswrapper[4612]: I1203 08:45:05.102473 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="578f949e-0359-4447-b1cf-4730fac4373d" path="/var/lib/kubelet/pods/578f949e-0359-4447-b1cf-4730fac4373d/volumes" Dec 03 08:45:05 crc kubenswrapper[4612]: I1203 08:45:05.508457 4612 generic.go:334] "Generic (PLEG): container finished" podID="76b9584c-cef3-44d8-8c53-83836062d37b" containerID="6733d858d81d0e9346abce2cef00a3b41918b9e114b0603cdbcaaf27a59c7314" exitCode=0 Dec 03 08:45:05 crc kubenswrapper[4612]: I1203 08:45:05.508506 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerDied","Data":"6733d858d81d0e9346abce2cef00a3b41918b9e114b0603cdbcaaf27a59c7314"} Dec 03 08:45:05 crc kubenswrapper[4612]: I1203 08:45:05.508539 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerStarted","Data":"2c34f207e666daa7e8b0c49b276e93fe0ef181a7cb3286349577026be6658f5d"} Dec 03 08:45:05 crc kubenswrapper[4612]: I1203 08:45:05.510465 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:45:07 crc 
kubenswrapper[4612]: I1203 08:45:07.528867 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerStarted","Data":"99689543258c26a2a0df7eca26c9d54109d7a77305121b83fa949a075eddadc3"} Dec 03 08:45:17 crc kubenswrapper[4612]: I1203 08:45:17.619168 4612 generic.go:334] "Generic (PLEG): container finished" podID="76b9584c-cef3-44d8-8c53-83836062d37b" containerID="99689543258c26a2a0df7eca26c9d54109d7a77305121b83fa949a075eddadc3" exitCode=0 Dec 03 08:45:17 crc kubenswrapper[4612]: I1203 08:45:17.619218 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerDied","Data":"99689543258c26a2a0df7eca26c9d54109d7a77305121b83fa949a075eddadc3"} Dec 03 08:45:19 crc kubenswrapper[4612]: I1203 08:45:19.645014 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerStarted","Data":"da4bc4f5d662f50c34f45c0aa1b56e6fdccc92cf32e6f965037385b021bed477"} Dec 03 08:45:19 crc kubenswrapper[4612]: I1203 08:45:19.670120 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-845bw" podStartSLOduration=3.741798582 podStartE2EDuration="16.6700998s" podCreationTimestamp="2025-12-03 08:45:03 +0000 UTC" firstStartedPulling="2025-12-03 08:45:05.510189597 +0000 UTC m=+4668.683546997" lastFinishedPulling="2025-12-03 08:45:18.438490805 +0000 UTC m=+4681.611848215" observedRunningTime="2025-12-03 08:45:19.668524572 +0000 UTC m=+4682.841881992" watchObservedRunningTime="2025-12-03 08:45:19.6700998 +0000 UTC m=+4682.843457200" Dec 03 08:45:24 crc kubenswrapper[4612]: I1203 08:45:24.411497 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:24 crc kubenswrapper[4612]: I1203 08:45:24.412901 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:25 crc kubenswrapper[4612]: I1203 08:45:25.460961 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-845bw" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="registry-server" probeResult="failure" output=< Dec 03 08:45:25 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 08:45:25 crc kubenswrapper[4612]: > Dec 03 08:45:34 crc kubenswrapper[4612]: I1203 08:45:34.475175 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:34 crc kubenswrapper[4612]: I1203 08:45:34.551560 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:35 crc kubenswrapper[4612]: I1203 08:45:35.186668 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:35 crc kubenswrapper[4612]: I1203 08:45:35.806742 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-845bw" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="registry-server" containerID="cri-o://da4bc4f5d662f50c34f45c0aa1b56e6fdccc92cf32e6f965037385b021bed477" gracePeriod=2 Dec 03 08:45:36 crc 
kubenswrapper[4612]: I1203 08:45:36.820780 4612 generic.go:334] "Generic (PLEG): container finished" podID="76b9584c-cef3-44d8-8c53-83836062d37b" containerID="da4bc4f5d662f50c34f45c0aa1b56e6fdccc92cf32e6f965037385b021bed477" exitCode=0 Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.820882 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerDied","Data":"da4bc4f5d662f50c34f45c0aa1b56e6fdccc92cf32e6f965037385b021bed477"} Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.821453 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-845bw" event={"ID":"76b9584c-cef3-44d8-8c53-83836062d37b","Type":"ContainerDied","Data":"2c34f207e666daa7e8b0c49b276e93fe0ef181a7cb3286349577026be6658f5d"} Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.821475 4612 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c34f207e666daa7e8b0c49b276e93fe0ef181a7cb3286349577026be6658f5d" Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.868579 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.946042 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities\") pod \"76b9584c-cef3-44d8-8c53-83836062d37b\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.946135 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56m5h\" (UniqueName: \"kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h\") pod \"76b9584c-cef3-44d8-8c53-83836062d37b\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.946197 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content\") pod \"76b9584c-cef3-44d8-8c53-83836062d37b\" (UID: \"76b9584c-cef3-44d8-8c53-83836062d37b\") " Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.947333 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities" (OuterVolumeSpecName: "utilities") pod "76b9584c-cef3-44d8-8c53-83836062d37b" (UID: "76b9584c-cef3-44d8-8c53-83836062d37b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:45:36 crc kubenswrapper[4612]: I1203 08:45:36.952774 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h" (OuterVolumeSpecName: "kube-api-access-56m5h") pod "76b9584c-cef3-44d8-8c53-83836062d37b" (UID: "76b9584c-cef3-44d8-8c53-83836062d37b"). InnerVolumeSpecName "kube-api-access-56m5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.049213 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.049774 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56m5h\" (UniqueName: \"kubernetes.io/projected/76b9584c-cef3-44d8-8c53-83836062d37b-kube-api-access-56m5h\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.070844 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76b9584c-cef3-44d8-8c53-83836062d37b" (UID: "76b9584c-cef3-44d8-8c53-83836062d37b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.152205 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76b9584c-cef3-44d8-8c53-83836062d37b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.829579 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-845bw" Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.858713 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:37 crc kubenswrapper[4612]: I1203 08:45:37.868253 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-845bw"] Dec 03 08:45:39 crc kubenswrapper[4612]: I1203 08:45:39.107111 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" path="/var/lib/kubelet/pods/76b9584c-cef3-44d8-8c53-83836062d37b/volumes" Dec 03 08:45:46 crc kubenswrapper[4612]: I1203 08:45:46.830651 4612 scope.go:117] "RemoveContainer" containerID="b7b2d4b46af12fcd2dcd920d7aebc4265a810b52e6f12b923b346191057e2fc1" Dec 03 08:46:14 crc kubenswrapper[4612]: I1203 08:46:14.224715 4612 generic.go:334] "Generic (PLEG): container finished" podID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerID="742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c" exitCode=0 Dec 03 08:46:14 crc kubenswrapper[4612]: I1203 08:46:14.224803 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-827lw/must-gather-k9brt" event={"ID":"e4f1e8aa-37f4-41d8-8171-3f156d9d610d","Type":"ContainerDied","Data":"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c"} Dec 03 08:46:14 crc kubenswrapper[4612]: I1203 08:46:14.226962 4612 scope.go:117] "RemoveContainer" containerID="742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c" Dec 03 08:46:14 crc kubenswrapper[4612]: I1203 08:46:14.613910 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-827lw_must-gather-k9brt_e4f1e8aa-37f4-41d8-8171-3f156d9d610d/gather/0.log" Dec 03 08:46:17 crc kubenswrapper[4612]: I1203 08:46:17.135916 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:46:17 crc kubenswrapper[4612]: I1203 08:46:17.136576 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.231767 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-827lw/must-gather-k9brt"] Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.232508 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-827lw/must-gather-k9brt" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="copy" containerID="cri-o://e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4" gracePeriod=2 Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.240470 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-827lw/must-gather-k9brt"] Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.730951 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-827lw_must-gather-k9brt_e4f1e8aa-37f4-41d8-8171-3f156d9d610d/copy/0.log" Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.731811 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.843229 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s7fl\" (UniqueName: \"kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl\") pod \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.843318 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output\") pod \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\" (UID: \"e4f1e8aa-37f4-41d8-8171-3f156d9d610d\") " Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.858172 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl" (OuterVolumeSpecName: "kube-api-access-6s7fl") pod "e4f1e8aa-37f4-41d8-8171-3f156d9d610d" (UID: "e4f1e8aa-37f4-41d8-8171-3f156d9d610d"). InnerVolumeSpecName "kube-api-access-6s7fl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:24 crc kubenswrapper[4612]: I1203 08:46:24.945461 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s7fl\" (UniqueName: \"kubernetes.io/projected/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-kube-api-access-6s7fl\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.044833 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e4f1e8aa-37f4-41d8-8171-3f156d9d610d" (UID: "e4f1e8aa-37f4-41d8-8171-3f156d9d610d"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.047598 4612 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e4f1e8aa-37f4-41d8-8171-3f156d9d610d-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.117465 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" path="/var/lib/kubelet/pods/e4f1e8aa-37f4-41d8-8171-3f156d9d610d/volumes" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.334818 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-827lw_must-gather-k9brt_e4f1e8aa-37f4-41d8-8171-3f156d9d610d/copy/0.log" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.335137 4612 generic.go:334] "Generic (PLEG): container finished" podID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerID="e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4" exitCode=143 Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.335189 4612 scope.go:117] "RemoveContainer" containerID="e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.335198 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-827lw/must-gather-k9brt" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.361139 4612 scope.go:117] "RemoveContainer" containerID="742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.422810 4612 scope.go:117] "RemoveContainer" containerID="e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4" Dec 03 08:46:25 crc kubenswrapper[4612]: E1203 08:46:25.424041 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4\": container with ID starting with e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4 not found: ID does not exist" containerID="e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.424073 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4"} err="failed to get container status \"e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4\": rpc error: code = NotFound desc = could not find container \"e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4\": container with ID starting with e04f5a7b30e12e2d6556daa3bb16d22d29cc2d2cb04c6ea0088278cbeb7b79f4 not found: ID does not exist" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 08:46:25.424098 4612 scope.go:117] "RemoveContainer" containerID="742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c" Dec 03 08:46:25 crc kubenswrapper[4612]: E1203 08:46:25.424420 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c\": container with ID starting with 742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c not found: ID does not exist" containerID="742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c" Dec 03 08:46:25 crc kubenswrapper[4612]: I1203 
08:46:25.424455 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c"} err="failed to get container status \"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c\": rpc error: code = NotFound desc = could not find container \"742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c\": container with ID starting with 742e33a909a700167757e19be6d3c8966200a359efd224d9e436af0efe29178c not found: ID does not exist" Dec 03 08:46:46 crc kubenswrapper[4612]: I1203 08:46:46.915819 4612 scope.go:117] "RemoveContainer" containerID="526778d627a19599fc5c6b4b62c1c9ba59d495985f7a170a611c654e8855312b" Dec 03 08:46:47 crc kubenswrapper[4612]: I1203 08:46:47.136369 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:46:47 crc kubenswrapper[4612]: I1203 08:46:47.136681 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.783196 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784214 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="gather" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784232 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="gather" Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784255 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="copy" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784265 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="copy" Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784284 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="extract-utilities" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784294 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="extract-utilities" Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784318 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deaa317b-dcbd-4f5a-894a-05b8e835a95f" containerName="collect-profiles" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784326 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="deaa317b-dcbd-4f5a-894a-05b8e835a95f" containerName="collect-profiles" Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784342 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="registry-server" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784350 4612 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="registry-server" Dec 03 08:47:16 crc kubenswrapper[4612]: E1203 08:47:16.784367 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="extract-content" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784375 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="extract-content" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784628 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="76b9584c-cef3-44d8-8c53-83836062d37b" containerName="registry-server" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784649 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="deaa317b-dcbd-4f5a-894a-05b8e835a95f" containerName="collect-profiles" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784672 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="gather" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.784692 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4f1e8aa-37f4-41d8-8171-3f156d9d610d" containerName="copy" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.789405 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.796396 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.822701 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.822782 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.822857 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzgkl\" (UniqueName: \"kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.924600 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzgkl\" (UniqueName: \"kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.924758 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities\") pod 
\"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.924804 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.925312 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.925695 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:16 crc kubenswrapper[4612]: I1203 08:47:16.956871 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzgkl\" (UniqueName: \"kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl\") pod \"certified-operators-nt4xm\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.118005 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.136461 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.136520 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.136573 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.137316 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.137379 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc" gracePeriod=600 Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.729241 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.889319 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc" exitCode=0 Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.889430 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc"} Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.889464 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"} Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.889484 4612 scope.go:117] "RemoveContainer" containerID="f95acefa497e38ac041a16453306eaf9dc0194458bbf1571d219054662717a44" Dec 03 08:47:17 crc kubenswrapper[4612]: I1203 08:47:17.892701 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerStarted","Data":"9ee9c014fcc5ec72cea9d523eb47257eb387111a1f5a8301892e0a8b4d362036"} Dec 03 08:47:18 crc kubenswrapper[4612]: I1203 08:47:18.903198 4612 generic.go:334] 
"Generic (PLEG): container finished" podID="122075a6-289f-4257-b472-7cdf8b1d750c" containerID="d8ac1223931fa28a9ec61a080a5c8c0ab7bd8d607ca533c4f0f3ca2d58f33d61" exitCode=0 Dec 03 08:47:18 crc kubenswrapper[4612]: I1203 08:47:18.903256 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerDied","Data":"d8ac1223931fa28a9ec61a080a5c8c0ab7bd8d607ca533c4f0f3ca2d58f33d61"} Dec 03 08:47:19 crc kubenswrapper[4612]: I1203 08:47:19.914995 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerStarted","Data":"6e96e7f7778108250248e4b47ef58c6efe3e5645e080625490f6847989b073be"} Dec 03 08:47:20 crc kubenswrapper[4612]: I1203 08:47:20.924394 4612 generic.go:334] "Generic (PLEG): container finished" podID="122075a6-289f-4257-b472-7cdf8b1d750c" containerID="6e96e7f7778108250248e4b47ef58c6efe3e5645e080625490f6847989b073be" exitCode=0 Dec 03 08:47:20 crc kubenswrapper[4612]: I1203 08:47:20.924488 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerDied","Data":"6e96e7f7778108250248e4b47ef58c6efe3e5645e080625490f6847989b073be"} Dec 03 08:47:22 crc kubenswrapper[4612]: I1203 08:47:22.950685 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerStarted","Data":"0aac30af087708d8ae5a09078575b814b398661ef88daca8a21f953918bc258e"} Dec 03 08:47:22 crc kubenswrapper[4612]: I1203 08:47:22.974214 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nt4xm" podStartSLOduration=4.457587534 podStartE2EDuration="6.974191756s" podCreationTimestamp="2025-12-03 08:47:16 +0000 UTC" firstStartedPulling="2025-12-03 08:47:18.905169845 +0000 UTC m=+4802.078527235" lastFinishedPulling="2025-12-03 08:47:21.421774047 +0000 UTC m=+4804.595131457" observedRunningTime="2025-12-03 08:47:22.969339288 +0000 UTC m=+4806.142696718" watchObservedRunningTime="2025-12-03 08:47:22.974191756 +0000 UTC m=+4806.147549156" Dec 03 08:47:27 crc kubenswrapper[4612]: I1203 08:47:27.119472 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:27 crc kubenswrapper[4612]: I1203 08:47:27.120109 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:27 crc kubenswrapper[4612]: I1203 08:47:27.207077 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:28 crc kubenswrapper[4612]: I1203 08:47:28.120715 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:28 crc kubenswrapper[4612]: I1203 08:47:28.188645 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:30 crc kubenswrapper[4612]: I1203 08:47:30.055702 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nt4xm" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" 
containerName="registry-server" containerID="cri-o://0aac30af087708d8ae5a09078575b814b398661ef88daca8a21f953918bc258e" gracePeriod=2 Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.068184 4612 generic.go:334] "Generic (PLEG): container finished" podID="122075a6-289f-4257-b472-7cdf8b1d750c" containerID="0aac30af087708d8ae5a09078575b814b398661ef88daca8a21f953918bc258e" exitCode=0 Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.068241 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerDied","Data":"0aac30af087708d8ae5a09078575b814b398661ef88daca8a21f953918bc258e"} Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.629387 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.667525 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities\") pod \"122075a6-289f-4257-b472-7cdf8b1d750c\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.667653 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content\") pod \"122075a6-289f-4257-b472-7cdf8b1d750c\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.667709 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzgkl\" (UniqueName: \"kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl\") pod \"122075a6-289f-4257-b472-7cdf8b1d750c\" (UID: \"122075a6-289f-4257-b472-7cdf8b1d750c\") " Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.678318 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities" (OuterVolumeSpecName: "utilities") pod "122075a6-289f-4257-b472-7cdf8b1d750c" (UID: "122075a6-289f-4257-b472-7cdf8b1d750c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.690015 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl" (OuterVolumeSpecName: "kube-api-access-zzgkl") pod "122075a6-289f-4257-b472-7cdf8b1d750c" (UID: "122075a6-289f-4257-b472-7cdf8b1d750c"). InnerVolumeSpecName "kube-api-access-zzgkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.741283 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "122075a6-289f-4257-b472-7cdf8b1d750c" (UID: "122075a6-289f-4257-b472-7cdf8b1d750c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.769749 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.769971 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122075a6-289f-4257-b472-7cdf8b1d750c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:31 crc kubenswrapper[4612]: I1203 08:47:31.769994 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzgkl\" (UniqueName: \"kubernetes.io/projected/122075a6-289f-4257-b472-7cdf8b1d750c-kube-api-access-zzgkl\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.083124 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt4xm" event={"ID":"122075a6-289f-4257-b472-7cdf8b1d750c","Type":"ContainerDied","Data":"9ee9c014fcc5ec72cea9d523eb47257eb387111a1f5a8301892e0a8b4d362036"} Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.083181 4612 scope.go:117] "RemoveContainer" containerID="0aac30af087708d8ae5a09078575b814b398661ef88daca8a21f953918bc258e" Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.084145 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt4xm" Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.110487 4612 scope.go:117] "RemoveContainer" containerID="6e96e7f7778108250248e4b47ef58c6efe3e5645e080625490f6847989b073be" Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.152763 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.153276 4612 scope.go:117] "RemoveContainer" containerID="d8ac1223931fa28a9ec61a080a5c8c0ab7bd8d607ca533c4f0f3ca2d58f33d61" Dec 03 08:47:32 crc kubenswrapper[4612]: I1203 08:47:32.167940 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nt4xm"] Dec 03 08:47:33 crc kubenswrapper[4612]: I1203 08:47:33.125180 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" path="/var/lib/kubelet/pods/122075a6-289f-4257-b472-7cdf8b1d750c/volumes" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.422604 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"] Dec 03 08:47:49 crc kubenswrapper[4612]: E1203 08:47:49.424022 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="registry-server" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.424047 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="registry-server" Dec 03 08:47:49 crc kubenswrapper[4612]: E1203 08:47:49.424071 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="extract-content" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.424085 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="extract-content" Dec 03 08:47:49 crc kubenswrapper[4612]: E1203 08:47:49.424122 4612 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="extract-utilities" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.424136 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="extract-utilities" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.424506 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="122075a6-289f-4257-b472-7cdf8b1d750c" containerName="registry-server" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.426789 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.433812 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"] Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.476589 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.476841 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.477022 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jllxh\" (UniqueName: \"kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.578808 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jllxh\" (UniqueName: \"kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.579097 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.579267 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.579580 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.579596 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.598800 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jllxh\" (UniqueName: \"kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh\") pod \"redhat-marketplace-f45vg\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") " pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:49 crc kubenswrapper[4612]: I1203 08:47:49.765725 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f45vg" Dec 03 08:47:50 crc kubenswrapper[4612]: I1203 08:47:50.282736 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"] Dec 03 08:47:51 crc kubenswrapper[4612]: I1203 08:47:51.273554 4612 generic.go:334] "Generic (PLEG): container finished" podID="88a8e13d-1a9a-4594-8eda-23715581f003" containerID="c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd" exitCode=0 Dec 03 08:47:51 crc kubenswrapper[4612]: I1203 08:47:51.273652 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerDied","Data":"c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd"} Dec 03 08:47:51 crc kubenswrapper[4612]: I1203 08:47:51.274023 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerStarted","Data":"cc8841586b4327ded3ce892e9f450e349dcd08690cd48cfbfd4fe8436fae35e9"} Dec 03 08:47:52 crc kubenswrapper[4612]: I1203 08:47:52.285829 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerStarted","Data":"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"} Dec 03 08:47:53 crc kubenswrapper[4612]: I1203 08:47:53.295620 4612 generic.go:334] "Generic (PLEG): container finished" podID="88a8e13d-1a9a-4594-8eda-23715581f003" containerID="1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566" exitCode=0 Dec 03 08:47:53 crc kubenswrapper[4612]: I1203 08:47:53.296482 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerDied","Data":"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"} Dec 03 08:47:54 crc kubenswrapper[4612]: I1203 08:47:54.308586 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerStarted","Data":"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"} Dec 03 08:47:54 crc kubenswrapper[4612]: I1203 08:47:54.333809 4612 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f45vg" podStartSLOduration=2.827439946 podStartE2EDuration="5.3337836s" podCreationTimestamp="2025-12-03 08:47:49 +0000 UTC" firstStartedPulling="2025-12-03 08:47:51.275585453 +0000 UTC m=+4834.448942873" lastFinishedPulling="2025-12-03 08:47:53.781929117 +0000 UTC m=+4836.955286527" observedRunningTime="2025-12-03 08:47:54.322634279 +0000 UTC m=+4837.495991679" watchObservedRunningTime="2025-12-03 08:47:54.3337836 +0000 UTC m=+4837.507141020"
Dec 03 08:47:59 crc kubenswrapper[4612]: I1203 08:47:59.766361 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:47:59 crc kubenswrapper[4612]: I1203 08:47:59.766882 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:47:59 crc kubenswrapper[4612]: I1203 08:47:59.820178 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:48:00 crc kubenswrapper[4612]: I1203 08:48:00.418046 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:48:00 crc kubenswrapper[4612]: I1203 08:48:00.485790 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"]
Dec 03 08:48:02 crc kubenswrapper[4612]: I1203 08:48:02.386064 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f45vg" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="registry-server" containerID="cri-o://0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46" gracePeriod=2
Dec 03 08:48:02 crc kubenswrapper[4612]: I1203 08:48:02.879654 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.044507 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jllxh\" (UniqueName: \"kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh\") pod \"88a8e13d-1a9a-4594-8eda-23715581f003\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") "
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.044697 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content\") pod \"88a8e13d-1a9a-4594-8eda-23715581f003\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") "
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.044762 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities\") pod \"88a8e13d-1a9a-4594-8eda-23715581f003\" (UID: \"88a8e13d-1a9a-4594-8eda-23715581f003\") "
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.045678 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities" (OuterVolumeSpecName: "utilities") pod "88a8e13d-1a9a-4594-8eda-23715581f003" (UID: "88a8e13d-1a9a-4594-8eda-23715581f003"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.046193 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.052316 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh" (OuterVolumeSpecName: "kube-api-access-jllxh") pod "88a8e13d-1a9a-4594-8eda-23715581f003" (UID: "88a8e13d-1a9a-4594-8eda-23715581f003"). InnerVolumeSpecName "kube-api-access-jllxh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.073022 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88a8e13d-1a9a-4594-8eda-23715581f003" (UID: "88a8e13d-1a9a-4594-8eda-23715581f003"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.147928 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jllxh\" (UniqueName: \"kubernetes.io/projected/88a8e13d-1a9a-4594-8eda-23715581f003-kube-api-access-jllxh\") on node \"crc\" DevicePath \"\""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.147991 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88a8e13d-1a9a-4594-8eda-23715581f003-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.398840 4612 generic.go:334] "Generic (PLEG): container finished" podID="88a8e13d-1a9a-4594-8eda-23715581f003" containerID="0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46" exitCode=0
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.398884 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerDied","Data":"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"}
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.398913 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f45vg" event={"ID":"88a8e13d-1a9a-4594-8eda-23715581f003","Type":"ContainerDied","Data":"cc8841586b4327ded3ce892e9f450e349dcd08690cd48cfbfd4fe8436fae35e9"}
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.398933 4612 scope.go:117] "RemoveContainer" containerID="0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.398931 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f45vg"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.431819 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"]
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.433074 4612 scope.go:117] "RemoveContainer" containerID="1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.441203 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f45vg"]
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.462170 4612 scope.go:117] "RemoveContainer" containerID="c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.499443 4612 scope.go:117] "RemoveContainer" containerID="0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"
Dec 03 08:48:03 crc kubenswrapper[4612]: E1203 08:48:03.500007 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46\": container with ID starting with 0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46 not found: ID does not exist" containerID="0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.500045 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46"} err="failed to get container status \"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46\": rpc error: code = NotFound desc = could not find container \"0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46\": container with ID starting with 0b1f39097a4fef53e7b7a1440e2613cff028077d3e12aded90dc86aa25a8de46 not found: ID does not exist"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.500070 4612 scope.go:117] "RemoveContainer" containerID="1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"
Dec 03 08:48:03 crc kubenswrapper[4612]: E1203 08:48:03.501443 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566\": container with ID starting with 1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566 not found: ID does not exist" containerID="1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.501474 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566"} err="failed to get container status \"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566\": rpc error: code = NotFound desc = could not find container \"1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566\": container with ID starting with 1f14f681d68b54d1b272ab4209077b71df6590c14b63710ca25f6d9a4482f566 not found: ID does not exist"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.501492 4612 scope.go:117] "RemoveContainer" containerID="c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd"
Dec 03 08:48:03 crc kubenswrapper[4612]: E1203 08:48:03.502345 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd\": container with ID starting with c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd not found: ID does not exist" containerID="c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd"
Dec 03 08:48:03 crc kubenswrapper[4612]: I1203 08:48:03.502373 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd"} err="failed to get container status \"c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd\": rpc error: code = NotFound desc = could not find container \"c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd\": container with ID starting with c8a6464b47026b8897900a01567bd06efc8eb677387040cd69c2f67c06682ffd not found: ID does not exist"
Dec 03 08:48:05 crc kubenswrapper[4612]: I1203 08:48:05.105516 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" path="/var/lib/kubelet/pods/88a8e13d-1a9a-4594-8eda-23715581f003/volumes"
Dec 03 08:49:17 crc kubenswrapper[4612]: I1203 08:49:17.135641 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:49:17 crc kubenswrapper[4612]: I1203 08:49:17.136967 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.540001 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:27 crc kubenswrapper[4612]: E1203 08:49:27.540926 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="registry-server"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.540952 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="registry-server"
Dec 03 08:49:27 crc kubenswrapper[4612]: E1203 08:49:27.544978 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="extract-content"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.545008 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="extract-content"
Dec 03 08:49:27 crc kubenswrapper[4612]: E1203 08:49:27.545017 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="extract-utilities"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.545024 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="extract-utilities"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.545324 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="88a8e13d-1a9a-4594-8eda-23715581f003" containerName="registry-server"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.546880 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.569677 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.731463 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7sgz\" (UniqueName: \"kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.732007 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.732124 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.833115 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7sgz\" (UniqueName: \"kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.833231 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.833274 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.833822 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.833868 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.860850 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7sgz\" (UniqueName: \"kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz\") pod \"community-operators-dxf4p\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") " pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:27 crc kubenswrapper[4612]: I1203 08:49:27.870612 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:28 crc kubenswrapper[4612]: I1203 08:49:28.211205 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:28 crc kubenswrapper[4612]: I1203 08:49:28.275454 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerStarted","Data":"c72ff513070f2aa26853cc24f86ac24511838cd1543ffe85b756deddf062c74c"}
Dec 03 08:49:29 crc kubenswrapper[4612]: I1203 08:49:29.288646 4612 generic.go:334] "Generic (PLEG): container finished" podID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerID="49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e" exitCode=0
Dec 03 08:49:29 crc kubenswrapper[4612]: I1203 08:49:29.288824 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerDied","Data":"49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e"}
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.303519 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerStarted","Data":"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"}
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.620479 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m28gf/must-gather-jlc2c"]
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.621912 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.624235 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m28gf"/"default-dockercfg-gj9p2"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.624922 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m28gf"/"kube-root-ca.crt"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.626022 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m28gf"/"openshift-service-ca.crt"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.730227 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j4r2\" (UniqueName: \"kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.730536 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.766792 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m28gf/must-gather-jlc2c"]
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.832278 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.832334 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j4r2\" (UniqueName: \"kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.832859 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.851793 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j4r2\" (UniqueName: \"kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2\") pod \"must-gather-jlc2c\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:30 crc kubenswrapper[4612]: I1203 08:49:30.948746 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/must-gather-jlc2c"
Dec 03 08:49:31 crc kubenswrapper[4612]: I1203 08:49:31.316421 4612 generic.go:334] "Generic (PLEG): container finished" podID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerID="ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf" exitCode=0
Dec 03 08:49:31 crc kubenswrapper[4612]: I1203 08:49:31.316737 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerDied","Data":"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"}
Dec 03 08:49:31 crc kubenswrapper[4612]: I1203 08:49:31.457022 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m28gf/must-gather-jlc2c"]
Dec 03 08:49:31 crc kubenswrapper[4612]: W1203 08:49:31.460624 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d795c66_eec8_4138_ba0a_1aaa62ce51d6.slice/crio-28d9c484f5bdf359d65fb60b2cf765299c8773f603f17b21d5a45937112a95e7 WatchSource:0}: Error finding container 28d9c484f5bdf359d65fb60b2cf765299c8773f603f17b21d5a45937112a95e7: Status 404 returned error can't find the container with id 28d9c484f5bdf359d65fb60b2cf765299c8773f603f17b21d5a45937112a95e7
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.350206 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/must-gather-jlc2c" event={"ID":"5d795c66-eec8-4138-ba0a-1aaa62ce51d6","Type":"ContainerStarted","Data":"2824a627fdf0b98e3c430f402d12388fb11d94c8d36f436747b9f890f9fea03b"}
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.350466 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/must-gather-jlc2c" event={"ID":"5d795c66-eec8-4138-ba0a-1aaa62ce51d6","Type":"ContainerStarted","Data":"48b0227fcb04bfd836e1ea29eebb878f99ca5fe8ecf73caa1184934e5009e6b3"}
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.350477 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/must-gather-jlc2c" event={"ID":"5d795c66-eec8-4138-ba0a-1aaa62ce51d6","Type":"ContainerStarted","Data":"28d9c484f5bdf359d65fb60b2cf765299c8773f603f17b21d5a45937112a95e7"}
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.354472 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerStarted","Data":"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"}
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.377667 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m28gf/must-gather-jlc2c" podStartSLOduration=2.377655219 podStartE2EDuration="2.377655219s" podCreationTimestamp="2025-12-03 08:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:49:32.375899006 +0000 UTC m=+4935.549256406" watchObservedRunningTime="2025-12-03 08:49:32.377655219 +0000 UTC m=+4935.551012619"
Dec 03 08:49:32 crc kubenswrapper[4612]: I1203 08:49:32.399477 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dxf4p" podStartSLOduration=2.884705548 podStartE2EDuration="5.399443617s" podCreationTimestamp="2025-12-03 08:49:27 +0000 UTC" firstStartedPulling="2025-12-03 08:49:29.291377867 +0000 UTC m=+4932.464735267" lastFinishedPulling="2025-12-03 08:49:31.806115936 +0000 UTC m=+4934.979473336" observedRunningTime="2025-12-03 08:49:32.39421063 +0000 UTC m=+4935.567568040" watchObservedRunningTime="2025-12-03 08:49:32.399443617 +0000 UTC m=+4935.572801007"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.853266 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m28gf/crc-debug-rt6v7"]
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.854919 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.870845 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.870973 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.932364 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.963411 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcvm2\" (UniqueName: \"kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:37 crc kubenswrapper[4612]: I1203 08:49:37.963540 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.064893 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcvm2\" (UniqueName: \"kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.065278 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.065394 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.400629 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcvm2\" (UniqueName: \"kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2\") pod \"crc-debug-rt6v7\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") " pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.473425 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.479228 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:38 crc kubenswrapper[4612]: I1203 08:49:38.589364 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:39 crc kubenswrapper[4612]: I1203 08:49:39.428179 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-rt6v7" event={"ID":"368840af-706d-4553-8bea-bf969a74c1c9","Type":"ContainerStarted","Data":"e4fe3eca296924ac5e716c59e2be444b5f02acb579edd1df7242606e3b44f458"}
Dec 03 08:49:39 crc kubenswrapper[4612]: I1203 08:49:39.428511 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-rt6v7" event={"ID":"368840af-706d-4553-8bea-bf969a74c1c9","Type":"ContainerStarted","Data":"0f3b98e1118280989049d814d44592cdaa8e40287521899aabdbf7cae1c44f21"}
Dec 03 08:49:39 crc kubenswrapper[4612]: I1203 08:49:39.445512 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m28gf/crc-debug-rt6v7" podStartSLOduration=2.445490029 podStartE2EDuration="2.445490029s" podCreationTimestamp="2025-12-03 08:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:49:39.441694307 +0000 UTC m=+4942.615051727" watchObservedRunningTime="2025-12-03 08:49:39.445490029 +0000 UTC m=+4942.618847429"
Dec 03 08:49:40 crc kubenswrapper[4612]: I1203 08:49:40.436018 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dxf4p" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="registry-server" containerID="cri-o://07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167" gracePeriod=2
Dec 03 08:49:40 crc kubenswrapper[4612]: I1203 08:49:40.938339 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.023047 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7sgz\" (UniqueName: \"kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz\") pod \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") "
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.023246 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content\") pod \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") "
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.023328 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities\") pod \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\" (UID: \"4d6748c8-8702-4c81-bf55-4df95b3b8f6c\") "
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.024292 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities" (OuterVolumeSpecName: "utilities") pod "4d6748c8-8702-4c81-bf55-4df95b3b8f6c" (UID: "4d6748c8-8702-4c81-bf55-4df95b3b8f6c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.030169 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz" (OuterVolumeSpecName: "kube-api-access-m7sgz") pod "4d6748c8-8702-4c81-bf55-4df95b3b8f6c" (UID: "4d6748c8-8702-4c81-bf55-4df95b3b8f6c"). InnerVolumeSpecName "kube-api-access-m7sgz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.094390 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d6748c8-8702-4c81-bf55-4df95b3b8f6c" (UID: "4d6748c8-8702-4c81-bf55-4df95b3b8f6c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.125317 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.125351 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.125361 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7sgz\" (UniqueName: \"kubernetes.io/projected/4d6748c8-8702-4c81-bf55-4df95b3b8f6c-kube-api-access-m7sgz\") on node \"crc\" DevicePath \"\""
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.445892 4612 generic.go:334] "Generic (PLEG): container finished" podID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerID="07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167" exitCode=0
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.446222 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerDied","Data":"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"}
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.446267 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dxf4p" event={"ID":"4d6748c8-8702-4c81-bf55-4df95b3b8f6c","Type":"ContainerDied","Data":"c72ff513070f2aa26853cc24f86ac24511838cd1543ffe85b756deddf062c74c"}
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.446289 4612 scope.go:117] "RemoveContainer" containerID="07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.446432 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dxf4p"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.471997 4612 scope.go:117] "RemoveContainer" containerID="ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.479975 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.490074 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dxf4p"]
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.504064 4612 scope.go:117] "RemoveContainer" containerID="49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.544840 4612 scope.go:117] "RemoveContainer" containerID="07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"
Dec 03 08:49:41 crc kubenswrapper[4612]: E1203 08:49:41.545550 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167\": container with ID starting with 07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167 not found: ID does not exist" containerID="07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.545604 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167"} err="failed to get container status \"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167\": rpc error: code = NotFound desc = could not find container \"07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167\": container with ID starting with 07d1572b9137a8b8425f0cdb3c76f05671cf4226a2d20b9ac81d6796a1b1d167 not found: ID does not exist"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.545628 4612 scope.go:117] "RemoveContainer" containerID="ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"
Dec 03 08:49:41 crc kubenswrapper[4612]: E1203 08:49:41.545960 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf\": container with ID starting with ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf not found: ID does not exist" containerID="ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.545997 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf"} err="failed to get container status \"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf\": rpc error: code = NotFound desc = could not find container \"ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf\": container with ID starting with ec9e32bbd22cf23642ca3320414d45853d4aac23596ef5a3b38e8e5fa2b1aecf not found: ID does not exist"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.546016 4612 scope.go:117] "RemoveContainer" containerID="49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e"
Dec 03 08:49:41 crc kubenswrapper[4612]: E1203 08:49:41.546273 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e\": container with ID starting with 49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e not found: ID does not exist" containerID="49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e"
Dec 03 08:49:41 crc kubenswrapper[4612]: I1203 08:49:41.546294 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e"} err="failed to get container status \"49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e\": rpc error: code = NotFound desc = could not find container \"49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e\": container with ID starting with 49e293df69a70f9d55e740c1177510ab19782d617fab46cc2afba3b4a2952a9e not found: ID does not exist"
Dec 03 08:49:43 crc kubenswrapper[4612]: I1203 08:49:43.108619 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" path="/var/lib/kubelet/pods/4d6748c8-8702-4c81-bf55-4df95b3b8f6c/volumes"
Dec 03 08:49:47 crc kubenswrapper[4612]: I1203 08:49:47.135508 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:49:47 crc kubenswrapper[4612]: I1203 08:49:47.136099 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.135562 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.136138 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.136183 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.136696 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.136749 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" gracePeriod=600
Dec 03 08:50:17 crc kubenswrapper[4612]: E1203 08:50:17.266059 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.811437 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" exitCode=0
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.811499 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"}
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.811555 4612 scope.go:117] "RemoveContainer" containerID="0bcf830ffe2e79df0eb12515a9792b4731d2ecc59a39866247af5869720338dc"
Dec 03 08:50:17 crc kubenswrapper[4612]: I1203 08:50:17.812269 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"
Dec 03 08:50:17 crc kubenswrapper[4612]: E1203 08:50:17.812661 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:50:21 crc kubenswrapper[4612]: I1203 08:50:21.844360 4612 generic.go:334] "Generic (PLEG): container finished" podID="368840af-706d-4553-8bea-bf969a74c1c9" containerID="e4fe3eca296924ac5e716c59e2be444b5f02acb579edd1df7242606e3b44f458" exitCode=0
Dec 03 08:50:21 crc kubenswrapper[4612]: I1203 08:50:21.844433 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-rt6v7" event={"ID":"368840af-706d-4553-8bea-bf969a74c1c9","Type":"ContainerDied","Data":"e4fe3eca296924ac5e716c59e2be444b5f02acb579edd1df7242606e3b44f458"}
Dec 03 08:50:22 crc kubenswrapper[4612]: I1203 08:50:22.962602 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:50:22 crc kubenswrapper[4612]: I1203 08:50:22.999710 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-rt6v7"]
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.007689 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-rt6v7"]
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.141214 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcvm2\" (UniqueName: \"kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2\") pod \"368840af-706d-4553-8bea-bf969a74c1c9\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") "
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.141258 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host\") pod \"368840af-706d-4553-8bea-bf969a74c1c9\" (UID: \"368840af-706d-4553-8bea-bf969a74c1c9\") "
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.141409 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host" (OuterVolumeSpecName: "host") pod "368840af-706d-4553-8bea-bf969a74c1c9" (UID: "368840af-706d-4553-8bea-bf969a74c1c9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.141682 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/368840af-706d-4553-8bea-bf969a74c1c9-host\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.157958 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2" (OuterVolumeSpecName: "kube-api-access-vcvm2") pod "368840af-706d-4553-8bea-bf969a74c1c9" (UID: "368840af-706d-4553-8bea-bf969a74c1c9"). InnerVolumeSpecName "kube-api-access-vcvm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.243297 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcvm2\" (UniqueName: \"kubernetes.io/projected/368840af-706d-4553-8bea-bf969a74c1c9-kube-api-access-vcvm2\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.864511 4612 scope.go:117] "RemoveContainer" containerID="e4fe3eca296924ac5e716c59e2be444b5f02acb579edd1df7242606e3b44f458"
Dec 03 08:50:23 crc kubenswrapper[4612]: I1203 08:50:23.864569 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-rt6v7"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.163545 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m28gf/crc-debug-4lt2n"]
Dec 03 08:50:24 crc kubenswrapper[4612]: E1203 08:50:24.164008 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368840af-706d-4553-8bea-bf969a74c1c9" containerName="container-00"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164022 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="368840af-706d-4553-8bea-bf969a74c1c9" containerName="container-00"
Dec 03 08:50:24 crc kubenswrapper[4612]: E1203 08:50:24.164038 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="registry-server"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164046 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="registry-server"
Dec 03 08:50:24 crc kubenswrapper[4612]: E1203 08:50:24.164061 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="extract-utilities"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164071 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="extract-utilities"
Dec 03 08:50:24 crc kubenswrapper[4612]: E1203 08:50:24.164089 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="extract-content"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164097 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="extract-content"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164349 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="368840af-706d-4553-8bea-bf969a74c1c9" containerName="container-00"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.164376 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d6748c8-8702-4c81-bf55-4df95b3b8f6c" containerName="registry-server"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.165063 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.259739 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.260126 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46sqm\" (UniqueName: \"kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.361710 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.361843 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.361848 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46sqm\" (UniqueName: \"kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.384886 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46sqm\" (UniqueName: \"kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm\") pod \"crc-debug-4lt2n\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") " pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.486678 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.873153 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-4lt2n" event={"ID":"1d563f40-8ceb-4741-b7e9-6e1815f3829e","Type":"ContainerStarted","Data":"c8e7bde2a2fced0992303fc80c19eea6ecb48147289d6fcf06a8d69e6d577ecb"}
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.873535 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-4lt2n" event={"ID":"1d563f40-8ceb-4741-b7e9-6e1815f3829e","Type":"ContainerStarted","Data":"ad23ffc4aee3c692ad4078fe795b60c00708c229b16a428e30728d71171679a4"}
Dec 03 08:50:24 crc kubenswrapper[4612]: I1203 08:50:24.893544 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m28gf/crc-debug-4lt2n" podStartSLOduration=0.893502037 podStartE2EDuration="893.502037ms" podCreationTimestamp="2025-12-03 08:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:50:24.884238072 +0000 UTC m=+4988.057595472" watchObservedRunningTime="2025-12-03 08:50:24.893502037 +0000 UTC m=+4988.066859447"
Dec 03 08:50:25 crc kubenswrapper[4612]: I1203 08:50:25.102159 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="368840af-706d-4553-8bea-bf969a74c1c9" path="/var/lib/kubelet/pods/368840af-706d-4553-8bea-bf969a74c1c9/volumes"
Dec 03 08:50:25 crc kubenswrapper[4612]: I1203 08:50:25.883078 4612 generic.go:334] "Generic (PLEG): container finished" podID="1d563f40-8ceb-4741-b7e9-6e1815f3829e" containerID="c8e7bde2a2fced0992303fc80c19eea6ecb48147289d6fcf06a8d69e6d577ecb" exitCode=0
Dec 03 08:50:25 crc kubenswrapper[4612]: I1203 08:50:25.883388 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-4lt2n" event={"ID":"1d563f40-8ceb-4741-b7e9-6e1815f3829e","Type":"ContainerDied","Data":"c8e7bde2a2fced0992303fc80c19eea6ecb48147289d6fcf06a8d69e6d577ecb"}
Dec 03 08:50:26 crc kubenswrapper[4612]: I1203 08:50:26.993218 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.074318 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-4lt2n"]
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.085775 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-4lt2n"]
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.105634 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host\") pod \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") "
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.105741 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host" (OuterVolumeSpecName: "host") pod "1d563f40-8ceb-4741-b7e9-6e1815f3829e" (UID: "1d563f40-8ceb-4741-b7e9-6e1815f3829e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.105802 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46sqm\" (UniqueName: \"kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm\") pod \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\" (UID: \"1d563f40-8ceb-4741-b7e9-6e1815f3829e\") "
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.106268 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d563f40-8ceb-4741-b7e9-6e1815f3829e-host\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.127383 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm" (OuterVolumeSpecName: "kube-api-access-46sqm") pod "1d563f40-8ceb-4741-b7e9-6e1815f3829e" (UID: "1d563f40-8ceb-4741-b7e9-6e1815f3829e"). InnerVolumeSpecName "kube-api-access-46sqm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.208079 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46sqm\" (UniqueName: \"kubernetes.io/projected/1d563f40-8ceb-4741-b7e9-6e1815f3829e-kube-api-access-46sqm\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.906780 4612 scope.go:117] "RemoveContainer" containerID="c8e7bde2a2fced0992303fc80c19eea6ecb48147289d6fcf06a8d69e6d577ecb"
Dec 03 08:50:27 crc kubenswrapper[4612]: I1203 08:50:27.906884 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-4lt2n"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.236970 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m28gf/crc-debug-bs46b"]
Dec 03 08:50:28 crc kubenswrapper[4612]: E1203 08:50:28.237353 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d563f40-8ceb-4741-b7e9-6e1815f3829e" containerName="container-00"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.237365 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d563f40-8ceb-4741-b7e9-6e1815f3829e" containerName="container-00"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.237564 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d563f40-8ceb-4741-b7e9-6e1815f3829e" containerName="container-00"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.238168 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.327143 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j42ph\" (UniqueName: \"kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.327322 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.429141 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j42ph\" (UniqueName: \"kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.429259 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.429334 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.460102 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j42ph\" (UniqueName: \"kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph\") pod \"crc-debug-bs46b\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") " pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.556568 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:28 crc kubenswrapper[4612]: W1203 08:50:28.603808 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35d28757_ae06_4249_81ff_5e916d7d7062.slice/crio-f25c1f77a604d16323fdaa5b6defe160f8029287c45545400917de6b4afb2194 WatchSource:0}: Error finding container f25c1f77a604d16323fdaa5b6defe160f8029287c45545400917de6b4afb2194: Status 404 returned error can't find the container with id f25c1f77a604d16323fdaa5b6defe160f8029287c45545400917de6b4afb2194
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.916915 4612 generic.go:334] "Generic (PLEG): container finished" podID="35d28757-ae06-4249-81ff-5e916d7d7062" containerID="25763dee90c9c91ca2ce9ff07cf3c7cf37e0e6f3c916b1b0294b36627f969eb3" exitCode=0
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.916999 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-bs46b" event={"ID":"35d28757-ae06-4249-81ff-5e916d7d7062","Type":"ContainerDied","Data":"25763dee90c9c91ca2ce9ff07cf3c7cf37e0e6f3c916b1b0294b36627f969eb3"}
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.917330 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/crc-debug-bs46b" event={"ID":"35d28757-ae06-4249-81ff-5e916d7d7062","Type":"ContainerStarted","Data":"f25c1f77a604d16323fdaa5b6defe160f8029287c45545400917de6b4afb2194"}
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.957349 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-bs46b"]
Dec 03 08:50:28 crc kubenswrapper[4612]: I1203 08:50:28.968082 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m28gf/crc-debug-bs46b"]
Dec 03 08:50:29 crc kubenswrapper[4612]: I1203 08:50:29.104198 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d563f40-8ceb-4741-b7e9-6e1815f3829e" path="/var/lib/kubelet/pods/1d563f40-8ceb-4741-b7e9-6e1815f3829e/volumes"
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.048547 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.159193 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j42ph\" (UniqueName: \"kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph\") pod \"35d28757-ae06-4249-81ff-5e916d7d7062\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") "
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.159542 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host\") pod \"35d28757-ae06-4249-81ff-5e916d7d7062\" (UID: \"35d28757-ae06-4249-81ff-5e916d7d7062\") "
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.159607 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host" (OuterVolumeSpecName: "host") pod "35d28757-ae06-4249-81ff-5e916d7d7062" (UID: "35d28757-ae06-4249-81ff-5e916d7d7062"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.160132 4612 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35d28757-ae06-4249-81ff-5e916d7d7062-host\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.173289 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph" (OuterVolumeSpecName: "kube-api-access-j42ph") pod "35d28757-ae06-4249-81ff-5e916d7d7062" (UID: "35d28757-ae06-4249-81ff-5e916d7d7062"). InnerVolumeSpecName "kube-api-access-j42ph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.262181 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j42ph\" (UniqueName: \"kubernetes.io/projected/35d28757-ae06-4249-81ff-5e916d7d7062-kube-api-access-j42ph\") on node \"crc\" DevicePath \"\""
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.935585 4612 scope.go:117] "RemoveContainer" containerID="25763dee90c9c91ca2ce9ff07cf3c7cf37e0e6f3c916b1b0294b36627f969eb3"
Dec 03 08:50:30 crc kubenswrapper[4612]: I1203 08:50:30.935629 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/crc-debug-bs46b"
Dec 03 08:50:31 crc kubenswrapper[4612]: I1203 08:50:31.090731 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"
Dec 03 08:50:31 crc kubenswrapper[4612]: E1203 08:50:31.091068 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:50:31 crc kubenswrapper[4612]: I1203 08:50:31.100243 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35d28757-ae06-4249-81ff-5e916d7d7062" path="/var/lib/kubelet/pods/35d28757-ae06-4249-81ff-5e916d7d7062/volumes"
Dec 03 08:50:46 crc kubenswrapper[4612]: I1203 08:50:46.089204 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"
Dec 03 08:50:46 crc kubenswrapper[4612]: E1203 08:50:46.090744 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:51:00 crc kubenswrapper[4612]: I1203 08:51:00.089750 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"
Dec 03 08:51:00 crc kubenswrapper[4612]: E1203 08:51:00.090639 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:51:11 crc kubenswrapper[4612]: I1203 08:51:11.709181 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5687f788c4-jknl6_ad775971-28a0-4fd6-8e8b-e10e5a9c0c50/barbican-api/0.log"
Dec 03 08:51:11 crc kubenswrapper[4612]: I1203 08:51:11.940673 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f87f554fd-p5qhz_4d772b9f-b6df-4ca8-8a76-f28285eef6b9/barbican-keystone-listener/0.log"
Dec 03 08:51:11 crc kubenswrapper[4612]: I1203 08:51:11.964685 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5687f788c4-jknl6_ad775971-28a0-4fd6-8e8b-e10e5a9c0c50/barbican-api-log/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.000593 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-f87f554fd-p5qhz_4d772b9f-b6df-4ca8-8a76-f28285eef6b9/barbican-keystone-listener-log/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.089965 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d"
Dec 03 08:51:12 crc kubenswrapper[4612]: E1203 08:51:12.090258 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.175831 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf5d979-4pmzw_9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16/barbican-worker/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.272461 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf5d979-4pmzw_9c9ba35b-856d-4e2e-8fa4-b52ee7c9ad16/barbican-worker-log/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.501578 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kmtf8_8d491f1b-5bbf-4508-8ddc-2e986613d792/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.623938 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/ceilometer-central-agent/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.722681 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/ceilometer-notification-agent/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.765261 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/proxy-httpd/0.log"
Dec 03 08:51:12 crc kubenswrapper[4612]: I1203 08:51:12.833237 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3867ea54-d402-4641-936e-9038ce646012/sg-core/0.log"
Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.244559 4612 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_cinder-api-0_751ad1b4-cd3c-4616-99ed-9b19fee06ae8/cinder-api-log/0.log" Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.311086 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_751ad1b4-cd3c-4616-99ed-9b19fee06ae8/cinder-api/0.log" Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.495903 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_df089922-13b4-43d5-beaf-8dff66c6e7cb/cinder-scheduler/0.log" Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.601404 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_df089922-13b4-43d5-beaf-8dff66c6e7cb/probe/0.log" Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.764282 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-m45rk_27f1ab42-07b8-4697-ae0b-0afc5cb72e06/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:13 crc kubenswrapper[4612]: I1203 08:51:13.908806 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-l892k_f20be1ec-5ef4-4559-8a86-e857886c0856/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.000553 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/init/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.317245 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rjkjv_8d6c8be7-a683-465a-9894-08b5ca61791e/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.354283 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/init/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.421833 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-kcpm2_231aa396-d447-46be-b443-03de13ee8d90/dnsmasq-dns/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.611083 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_33add78e-3ec1-42dd-90aa-9df9f53028b3/glance-httpd/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.662430 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_33add78e-3ec1-42dd-90aa-9df9f53028b3/glance-log/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.833292 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f68a55b9-312b-42b5-a0ac-ffe92a4e81b8/glance-httpd/0.log" Dec 03 08:51:14 crc kubenswrapper[4612]: I1203 08:51:14.904638 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f68a55b9-312b-42b5-a0ac-ffe92a4e81b8/glance-log/0.log" Dec 03 08:51:15 crc kubenswrapper[4612]: I1203 08:51:15.051185 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon/1.log" Dec 03 08:51:15 crc kubenswrapper[4612]: I1203 08:51:15.142517 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon/0.log" Dec 03 08:51:15 crc kubenswrapper[4612]: I1203 08:51:15.558619 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2r2kj_237ab75b-84d9-4bd7-b235-2073221081f2/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:15 crc kubenswrapper[4612]: I1203 08:51:15.813557 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-g56jm_eeb3ffee-5657-4c45-b0f5-4052d9cbb2fd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:16 crc kubenswrapper[4612]: I1203 08:51:16.018853 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-55fc5c6c94-pjh5s_29d52104-a465-4ca0-a040-d9dba9e47600/horizon-log/0.log" Dec 03 08:51:16 crc kubenswrapper[4612]: I1203 08:51:16.252595 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412481-m7pjg_9594636b-c035-4a21-94d7-b54b4e73ef55/keystone-cron/0.log" Dec 03 08:51:16 crc kubenswrapper[4612]: I1203 08:51:16.374057 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_67686a82-cd3d-4b98-ab0f-b2e37c74a12f/kube-state-metrics/0.log" Dec 03 08:51:16 crc kubenswrapper[4612]: I1203 08:51:16.585377 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-664f766786-5w95f_0b3c004c-b094-4f83-b5c0-35fd59313980/keystone-api/0.log" Dec 03 08:51:17 crc kubenswrapper[4612]: I1203 08:51:17.138675 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6xzgv_7f0ab99d-079d-4eda-8308-33f1a44d5c3b/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:17 crc kubenswrapper[4612]: I1203 08:51:17.415487 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-bsn5s_fe4761e2-fbfe-473c-bc56-fafd2d11559b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:17 crc kubenswrapper[4612]: I1203 08:51:17.658687 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f7b8b49c9-7g7fc_d86bd30e-374a-4a76-be08-89a4e3310b61/neutron-httpd/0.log" Dec 03 08:51:17 crc kubenswrapper[4612]: I1203 08:51:17.782259 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f7b8b49c9-7g7fc_d86bd30e-374a-4a76-be08-89a4e3310b61/neutron-api/0.log" Dec 03 08:51:18 crc kubenswrapper[4612]: I1203 08:51:18.503123 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_e95122bf-4ece-43ec-9974-c8388713d7d6/nova-cell0-conductor-conductor/0.log" Dec 03 08:51:18 crc kubenswrapper[4612]: I1203 08:51:18.744689 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_cee4bc96-2455-41b3-a3cc-d743fab6e761/nova-cell1-conductor-conductor/0.log" Dec 03 08:51:19 crc kubenswrapper[4612]: I1203 08:51:19.203145 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_513ba267-34e9-4883-9eb1-55f5441813db/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 08:51:19 crc kubenswrapper[4612]: I1203 08:51:19.294716 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bfb6ea50-d549-413b-b2b0-70f5d0e38954/nova-api-log/0.log" Dec 03 08:51:19 crc kubenswrapper[4612]: I1203 08:51:19.445755 4612 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-sc9jx_f9b6eb07-a99d-4365-b819-81f008e2018d/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:19 crc kubenswrapper[4612]: I1203 08:51:19.659350 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_28910ce9-c2c5-484e-bacd-9170253c2e83/nova-metadata-log/0.log" Dec 03 08:51:19 crc kubenswrapper[4612]: I1203 08:51:19.785908 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bfb6ea50-d549-413b-b2b0-70f5d0e38954/nova-api-api/0.log" Dec 03 08:51:20 crc kubenswrapper[4612]: I1203 08:51:20.060724 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/mysql-bootstrap/0.log" Dec 03 08:51:20 crc kubenswrapper[4612]: I1203 08:51:20.573317 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/galera/0.log" Dec 03 08:51:20 crc kubenswrapper[4612]: I1203 08:51:20.629106 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af40404d-eb38-4281-ae78-fa546de7d6a2/mysql-bootstrap/0.log" Dec 03 08:51:20 crc kubenswrapper[4612]: I1203 08:51:20.681528 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cf66f76e-bca2-466f-b672-bfe680810c3e/nova-scheduler-scheduler/0.log" Dec 03 08:51:20 crc kubenswrapper[4612]: I1203 08:51:20.949788 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/mysql-bootstrap/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.212263 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/mysql-bootstrap/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.325016 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_10aae93b-2a6b-4a5a-a27e-9c2714777dfb/galera/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.435860 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_6b63827f-76e8-454f-9243-6c05f9e3c2fd/openstackclient/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.541272 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-j7748_385edacb-e835-42f4-a521-7c321043b989/ovn-controller/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.802155 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zc5qp_07101baf-17d1-4e64-8c8a-4ee57ab33873/openstack-network-exporter/0.log" Dec 03 08:51:21 crc kubenswrapper[4612]: I1203 08:51:21.829980 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_28910ce9-c2c5-484e-bacd-9170253c2e83/nova-metadata-metadata/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.018055 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server-init/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.033398 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b894480f-fa85-4215-8599-23743aa1c262/memcached/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.201538 4612 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovs-vswitchd/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.265141 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.297722 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ng85x_d487d317-a480-4234-9db2-b9018c5c5e38/ovsdb-server-init/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.364638 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-6ztrk_44b9b74b-2985-47c3-aec4-304bfc2d6122/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.506233 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d38a92e9-cf02-4966-9bb1-4ea642490d00/ovn-northd/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.560819 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d38a92e9-cf02-4966-9bb1-4ea642490d00/openstack-network-exporter/0.log" Dec 03 08:51:22 crc kubenswrapper[4612]: I1203 08:51:22.591267 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9463ced8-f487-4a16-9af3-07b736ca556c/openstack-network-exporter/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.172542 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9463ced8-f487-4a16-9af3-07b736ca556c/ovsdbserver-nb/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.356678 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_76717115-6292-47aa-bc1a-90c5e618967b/openstack-network-exporter/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.374142 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_76717115-6292-47aa-bc1a-90c5e618967b/ovsdbserver-sb/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.639597 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cd4567696-hsfd9_1f9cee01-af98-4b34-b263-ae543c237e0b/placement-api/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.689675 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/setup-container/0.log" Dec 03 08:51:23 crc kubenswrapper[4612]: I1203 08:51:23.782377 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cd4567696-hsfd9_1f9cee01-af98-4b34-b263-ae543c237e0b/placement-log/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.053315 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/setup-container/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.249498 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6ed2435b-f44d-4468-baec-035755359147/rabbitmq/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.317042 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/setup-container/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.880082 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/rabbitmq/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.893546 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_08764b2d-6ed4-4495-8338-03d2af8dcbdd/setup-container/0.log" Dec 03 08:51:24 crc kubenswrapper[4612]: I1203 08:51:24.898553 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-bnzwf_4ad5399d-3f69-4f51-bc8c-9245e721bcfd/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.042416 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-qvcck_41c7b385-5686-4d88-a86a-072eb493e1a2/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.188106 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-8z2wr_18af4e51-62fc-4bba-8afd-c8b743e70852/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.213082 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-vwxdw_18e6e5c3-b90b-429e-9b89-c94e3f20ecdd/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.325789 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bkpsr_7a053178-34a7-49d7-8119-09c53336a553/ssh-known-hosts-edpm-deployment/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.510564 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7b57f75fd5-642lv_2528552f-220d-4b33-990a-7793d5d8987a/proxy-httpd/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.598046 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7b57f75fd5-642lv_2528552f-220d-4b33-990a-7793d5d8987a/proxy-server/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.762506 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-p474m_0aff4c7e-d189-4658-b1a6-388353c8dfa8/swift-ring-rebalance/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.767466 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-auditor/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.851782 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-reaper/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.905996 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-replicator/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.948766 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/account-server/0.log" Dec 03 08:51:25 crc kubenswrapper[4612]: I1203 08:51:25.977930 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-auditor/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.075643 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-replicator/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.089624 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:51:26 crc kubenswrapper[4612]: E1203 08:51:26.090419 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.093252 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-server/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.152777 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/container-updater/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.225525 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-auditor/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.286155 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-expirer/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.363057 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-replicator/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.368966 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-server/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.417763 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/object-updater/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.481366 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/swift-recon-cron/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.483208 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_199ac340-6fa4-414c-b9b1-80aff6965bc0/rsync/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.657440 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-4gccm_063f10ac-9f99-4bae-9eae-ec9d2ebb773f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.703253 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5cc1684d-023d-46c3-8f87-3e91941a34e9/tempest-tests-tempest-tests-runner/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.830307 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_562541dd-f13c-47e5-9411-cf0d9a7c0a54/test-operator-logs-container/0.log" Dec 03 08:51:26 crc kubenswrapper[4612]: I1203 08:51:26.921165 4612 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-p95mm_7624e359-51e1-46df-829a-12aebc8d3688/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 08:51:37 crc kubenswrapper[4612]: I1203 08:51:37.097008 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:51:37 crc kubenswrapper[4612]: E1203 08:51:37.097773 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:51:47 crc kubenswrapper[4612]: I1203 08:51:47.215114 4612 scope.go:117] "RemoveContainer" containerID="6733d858d81d0e9346abce2cef00a3b41918b9e114b0603cdbcaaf27a59c7314" Dec 03 08:51:47 crc kubenswrapper[4612]: I1203 08:51:47.254248 4612 scope.go:117] "RemoveContainer" containerID="da4bc4f5d662f50c34f45c0aa1b56e6fdccc92cf32e6f965037385b021bed477" Dec 03 08:51:47 crc kubenswrapper[4612]: I1203 08:51:47.295662 4612 scope.go:117] "RemoveContainer" containerID="99689543258c26a2a0df7eca26c9d54109d7a77305121b83fa949a075eddadc3" Dec 03 08:51:48 crc kubenswrapper[4612]: I1203 08:51:48.089645 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:51:48 crc kubenswrapper[4612]: E1203 08:51:48.090161 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:51:55 crc kubenswrapper[4612]: I1203 08:51:55.707671 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:51:55 crc kubenswrapper[4612]: I1203 08:51:55.941514 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:51:55 crc kubenswrapper[4612]: I1203 08:51:55.948484 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:51:55 crc kubenswrapper[4612]: I1203 08:51:55.978333 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.132602 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/util/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.250593 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/extract/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.252869 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32f88f8a8031190ec7e9314e78f13aa33ad7359a1edee103b7a2d5e9b9pjw7k_acb105f8-b0d1-48ce-81cf-e0f2f1a6202d/pull/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.359823 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-n2rzg_3d3cecf1-2f48-4b22-9350-870d25e786ef/kube-rbac-proxy/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.500031 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-n2rzg_3d3cecf1-2f48-4b22-9350-870d25e786ef/manager/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.534836 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2pxgx_3c3eb81e-314d-486d-afa4-443f33c54510/kube-rbac-proxy/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.627138 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-2pxgx_3c3eb81e-314d-486d-afa4-443f33c54510/manager/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.770050 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4cjnp_9f7c1634-c25d-4fc1-92bd-d95ef05c7868/manager/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.790186 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4cjnp_9f7c1634-c25d-4fc1-92bd-d95ef05c7868/kube-rbac-proxy/0.log" Dec 03 08:51:56 crc kubenswrapper[4612]: I1203 08:51:56.965680 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tklpk_d75ef15b-d718-436d-b570-21416a0c4021/kube-rbac-proxy/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.068347 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tklpk_d75ef15b-d718-436d-b570-21416a0c4021/manager/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.134614 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-5rg69_b8715491-d469-4ade-8434-765685a955db/kube-rbac-proxy/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.169527 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-5rg69_b8715491-d469-4ade-8434-765685a955db/manager/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.357669 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bs99s_c2db30ef-0db3-44d4-b276-3b81195d4962/kube-rbac-proxy/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.436752 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-rdf2f_d70b0d51-8225-4d2b-b128-aeda29446ab9/kube-rbac-proxy/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.443588 4612 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bs99s_c2db30ef-0db3-44d4-b276-3b81195d4962/manager/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.684356 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-6mbrd_39305f1e-8b3f-43aa-97d4-48410cc7fe91/kube-rbac-proxy/0.log" Dec 03 08:51:57 crc kubenswrapper[4612]: I1203 08:51:57.910537 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-flkb9_58adadbb-3706-4f8c-be33-31836f4860e5/kube-rbac-proxy/0.log" Dec 03 08:51:58 crc kubenswrapper[4612]: I1203 08:51:58.294570 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-6mbrd_39305f1e-8b3f-43aa-97d4-48410cc7fe91/manager/0.log" Dec 03 08:51:58 crc kubenswrapper[4612]: I1203 08:51:58.424401 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-rdf2f_d70b0d51-8225-4d2b-b128-aeda29446ab9/manager/0.log" Dec 03 08:51:58 crc kubenswrapper[4612]: I1203 08:51:58.710434 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-flkb9_58adadbb-3706-4f8c-be33-31836f4860e5/manager/0.log" Dec 03 08:51:58 crc kubenswrapper[4612]: I1203 08:51:58.847870 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-d98bb_790a3a61-40c9-4360-b7b6-9f08edbec437/kube-rbac-proxy/0.log" Dec 03 08:51:58 crc kubenswrapper[4612]: I1203 08:51:58.904911 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-d98bb_790a3a61-40c9-4360-b7b6-9f08edbec437/manager/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.045190 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-m898g_e4cadc4e-ebfd-4886-83a2-1caf4aef2b68/kube-rbac-proxy/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.113769 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-m898g_e4cadc4e-ebfd-4886-83a2-1caf4aef2b68/manager/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.172679 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-r75nm_d9951bd4-0756-4c79-96b0-ceaac8a1e51a/kube-rbac-proxy/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.326986 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-r75nm_d9951bd4-0756-4c79-96b0-ceaac8a1e51a/manager/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.382225 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-4vp27_24a39a3f-a75f-4029-b861-cf683db5aae2/kube-rbac-proxy/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.495373 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-4vp27_24a39a3f-a75f-4029-b861-cf683db5aae2/manager/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 
08:51:59.625876 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-b2s4p_ec8611ec-2e0f-4906-af03-7dc350e7e783/manager/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.628635 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-b2s4p_ec8611ec-2e0f-4906-af03-7dc350e7e783/kube-rbac-proxy/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.766595 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9_a9a1ffeb-b3b4-4b07-911b-b829962b6827/kube-rbac-proxy/0.log" Dec 03 08:51:59 crc kubenswrapper[4612]: I1203 08:51:59.806484 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4k77z9_a9a1ffeb-b3b4-4b07-911b-b829962b6827/manager/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.089458 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:52:00 crc kubenswrapper[4612]: E1203 08:52:00.089721 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.271553 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8f4757f6b-g7ncc_097f95f4-4fc4-43c0-aefd-da8b3c0111f3/operator/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.437565 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-vf48w_5c654dcd-f507-4d31-b3d7-7230cc7cb086/registry-server/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.570531 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jbfkr_99188e23-da4a-4d43-8778-a2a0b9e962dc/kube-rbac-proxy/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.818134 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-w62v7_1ac4eade-01c8-4323-8796-6b2d39a7ee36/kube-rbac-proxy/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.845733 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jbfkr_99188e23-da4a-4d43-8778-a2a0b9e962dc/manager/0.log" Dec 03 08:52:00 crc kubenswrapper[4612]: I1203 08:52:00.926135 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-w62v7_1ac4eade-01c8-4323-8796-6b2d39a7ee36/manager/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.162199 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-64f7f94cfd-bdkfw_f6260167-bcb0-4f9e-8a44-6cd47d248296/manager/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.181552 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-kv6n4_5a8351bf-c4cf-40fc-8df9-22b3064770a3/kube-rbac-proxy/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.234449 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-6fzpq_8fd04295-8c24-459f-b2d5-1fee88165e78/operator/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.402980 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-jbbw2_2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2/manager/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.425914 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-jbbw2_2c84a69e-5ccd-4f4e-964d-ddf44e8e83b2/kube-rbac-proxy/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.434280 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-kv6n4_5a8351bf-c4cf-40fc-8df9-22b3064770a3/manager/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.598014 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-snwlr_b52c7da9-b392-448b-a04a-1afa333df442/kube-rbac-proxy/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.647183 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-snwlr_b52c7da9-b392-448b-a04a-1afa333df442/manager/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.712419 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-495qs_797a523a-540b-4b10-a294-0543192f0c56/kube-rbac-proxy/0.log" Dec 03 08:52:01 crc kubenswrapper[4612]: I1203 08:52:01.818617 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-495qs_797a523a-540b-4b10-a294-0543192f0c56/manager/0.log" Dec 03 08:52:14 crc kubenswrapper[4612]: I1203 08:52:14.092426 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:52:14 crc kubenswrapper[4612]: E1203 08:52:14.093210 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:52:21 crc kubenswrapper[4612]: I1203 08:52:21.818766 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dzht7_db28a622-4b74-49e9-bd91-6f2a253583fe/control-plane-machine-set-operator/0.log" Dec 03 08:52:22 crc kubenswrapper[4612]: I1203 08:52:22.041745 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9vwq6_749db599-e347-4a7b-9ff8-9c33514ee64a/machine-api-operator/0.log" Dec 03 08:52:22 crc kubenswrapper[4612]: I1203 08:52:22.073843 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9vwq6_749db599-e347-4a7b-9ff8-9c33514ee64a/kube-rbac-proxy/0.log" Dec 03 08:52:28 crc kubenswrapper[4612]: I1203 08:52:28.089614 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:52:28 crc kubenswrapper[4612]: E1203 08:52:28.090417 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:52:35 crc kubenswrapper[4612]: I1203 08:52:35.432690 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4kwk4_a8b94ba1-6d11-4835-9ee9-e1756681dc3e/cert-manager-controller/0.log" Dec 03 08:52:35 crc kubenswrapper[4612]: I1203 08:52:35.690259 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6gdmj_f221cbcd-d224-483f-b688-6d877302a502/cert-manager-cainjector/0.log" Dec 03 08:52:35 crc kubenswrapper[4612]: I1203 08:52:35.745205 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-28nxc_ac4674d9-ae54-48a4-858b-75a91546ddd9/cert-manager-webhook/0.log" Dec 03 08:52:43 crc kubenswrapper[4612]: I1203 08:52:43.094408 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:52:43 crc kubenswrapper[4612]: E1203 08:52:43.095105 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:52:50 crc kubenswrapper[4612]: I1203 08:52:50.695272 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-qgl65_bd325177-e3fc-476e-b59f-363f1bc2fe0a/nmstate-console-plugin/0.log" Dec 03 08:52:51 crc kubenswrapper[4612]: I1203 08:52:51.005370 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4mtzb_3dfc52ec-c1e6-4162-bccf-6fb5a855212f/nmstate-handler/0.log" Dec 03 08:52:51 crc kubenswrapper[4612]: I1203 08:52:51.071190 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-7dp5q_0eea55b7-ecfe-4fc9-bff2-061da172743a/kube-rbac-proxy/0.log" Dec 03 08:52:51 crc kubenswrapper[4612]: I1203 08:52:51.101463 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-7dp5q_0eea55b7-ecfe-4fc9-bff2-061da172743a/nmstate-metrics/0.log" Dec 03 08:52:51 crc kubenswrapper[4612]: I1203 08:52:51.313559 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-vlsmd_b1d96da3-e5c3-46d4-b29b-6121d6e4d112/nmstate-operator/0.log" Dec 03 08:52:51 crc kubenswrapper[4612]: I1203 08:52:51.375774 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-hnhsj_a69b0d1e-d485-4424-a5a0-607e997bbaf6/nmstate-webhook/0.log" Dec 03 08:52:55 crc kubenswrapper[4612]: I1203 08:52:55.089780 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:52:55 crc kubenswrapper[4612]: E1203 08:52:55.090758 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:53:09 crc kubenswrapper[4612]: I1203 08:53:09.093874 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:53:09 crc kubenswrapper[4612]: E1203 08:53:09.094817 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.107888 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-gnqmc_a982bc05-54be-4a0b-8e04-1e566601060d/kube-rbac-proxy/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.275812 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-gnqmc_a982bc05-54be-4a0b-8e04-1e566601060d/controller/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.541790 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.780626 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.805112 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.852544 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:53:10 crc kubenswrapper[4612]: I1203 08:53:10.918903 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.138820 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.189585 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.190469 4612 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.231250 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.435533 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-reloader/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.448086 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/controller/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.475715 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-metrics/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.489158 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/cp-frr-files/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.646274 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/frr-metrics/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.695127 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/kube-rbac-proxy-frr/0.log" Dec 03 08:53:11 crc kubenswrapper[4612]: I1203 08:53:11.763461 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/kube-rbac-proxy/0.log" Dec 03 08:53:12 crc kubenswrapper[4612]: I1203 08:53:12.010644 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/reloader/0.log" Dec 03 08:53:12 crc kubenswrapper[4612]: I1203 08:53:12.096419 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-5r284_fd2ac083-0876-4383-ba05-5493cd25e480/frr-k8s-webhook-server/0.log" Dec 03 08:53:12 crc kubenswrapper[4612]: I1203 08:53:12.781884 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67fc746f5d-s9gvn_bd467808-b93d-47ee-bdf3-8e6f29bf3506/manager/0.log" Dec 03 08:53:12 crc kubenswrapper[4612]: I1203 08:53:12.794957 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6b85868c59-dznbc_ac4cb1e7-b3c7-4b39-a038-49071cb6ac2d/webhook-server/0.log" Dec 03 08:53:12 crc kubenswrapper[4612]: I1203 08:53:12.905633 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-djbsb_77aa8be3-2081-4ed8-8507-3f466dbef21c/frr/0.log" Dec 03 08:53:13 crc kubenswrapper[4612]: I1203 08:53:13.135066 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wm95_410e9076-5ebb-45a7-880a-77bffe01911b/kube-rbac-proxy/0.log" Dec 03 08:53:13 crc kubenswrapper[4612]: I1203 08:53:13.271956 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wm95_410e9076-5ebb-45a7-880a-77bffe01911b/speaker/0.log" Dec 03 08:53:22 crc kubenswrapper[4612]: I1203 08:53:22.090087 4612 scope.go:117] "RemoveContainer" 
containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:53:22 crc kubenswrapper[4612]: E1203 08:53:22.090974 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:53:28 crc kubenswrapper[4612]: I1203 08:53:28.553814 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:53:28 crc kubenswrapper[4612]: I1203 08:53:28.753276 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:53:28 crc kubenswrapper[4612]: I1203 08:53:28.841350 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:53:28 crc kubenswrapper[4612]: I1203 08:53:28.878629 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:53:29 crc kubenswrapper[4612]: I1203 08:53:29.089780 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/pull/0.log" Dec 03 08:53:29 crc kubenswrapper[4612]: I1203 08:53:29.157658 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/util/0.log" Dec 03 08:53:29 crc kubenswrapper[4612]: I1203 08:53:29.158147 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqp6hr_4626e1e5-23a3-47d8-98ae-3a4ee0c39c92/extract/0.log" Dec 03 08:53:29 crc kubenswrapper[4612]: I1203 08:53:29.809792 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.019482 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.115140 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.156976 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.279623 4612 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/extract/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.313588 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/pull/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.375071 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839kr72_2fe33ad3-592c-48e5-83ec-a919da42fd49/util/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.504989 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.681645 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.683652 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.684728 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.876509 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-utilities/0.log" Dec 03 08:53:30 crc kubenswrapper[4612]: I1203 08:53:30.883272 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/extract-content/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.243841 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.337135 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ct4c4_6f0bb622-c6e5-4c3a-93b6-8ddbb0a4c619/registry-server/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.380385 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.393299 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.475311 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:53:31 crc kubenswrapper[4612]: I1203 08:53:31.708313 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-content/0.log" Dec 03 08:53:31 crc 
kubenswrapper[4612]: I1203 08:53:31.806579 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.086548 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-x2kxn_4db345c6-2ee2-4acf-9be4-a705bddb07fe/marketplace-operator/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.183481 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.393788 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.411507 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-x4zn7_c1ce7b50-2a84-44dc-9398-24bc9f03f745/registry-server/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.443459 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.447617 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.627191 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.643898 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/extract-content/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.688930 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.917813 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.950274 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-npj5l_ecf12e7f-21e7-40f0-bdb4-e07c8437cef8/registry-server/0.log" Dec 03 08:53:32 crc kubenswrapper[4612]: I1203 08:53:32.988289 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:53:33 crc kubenswrapper[4612]: I1203 08:53:33.007468 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:53:33 crc kubenswrapper[4612]: I1203 08:53:33.219165 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-utilities/0.log" Dec 03 08:53:33 crc kubenswrapper[4612]: 
I1203 08:53:33.234037 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/extract-content/0.log" Dec 03 08:53:33 crc kubenswrapper[4612]: I1203 08:53:33.732464 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qbglv_a3e32e0f-ff72-43ff-8afb-54fbf1be823a/registry-server/0.log" Dec 03 08:53:34 crc kubenswrapper[4612]: I1203 08:53:34.089917 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:53:34 crc kubenswrapper[4612]: E1203 08:53:34.090132 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:53:48 crc kubenswrapper[4612]: I1203 08:53:48.089985 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:53:48 crc kubenswrapper[4612]: E1203 08:53:48.090817 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:53:59 crc kubenswrapper[4612]: I1203 08:53:59.090259 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:53:59 crc kubenswrapper[4612]: E1203 08:53:59.094447 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:54:13 crc kubenswrapper[4612]: I1203 08:54:13.091369 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:54:13 crc kubenswrapper[4612]: E1203 08:54:13.094027 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:54:27 crc kubenswrapper[4612]: I1203 08:54:27.098174 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:54:27 crc kubenswrapper[4612]: E1203 08:54:27.100045 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:54:40 crc kubenswrapper[4612]: I1203 08:54:40.094008 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:54:40 crc kubenswrapper[4612]: E1203 08:54:40.094704 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:54:51 crc kubenswrapper[4612]: I1203 08:54:51.090489 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:54:51 crc kubenswrapper[4612]: E1203 08:54:51.092407 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:55:02 crc kubenswrapper[4612]: I1203 08:55:02.089859 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:55:02 crc kubenswrapper[4612]: E1203 08:55:02.090578 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:55:15 crc kubenswrapper[4612]: I1203 08:55:15.090359 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:55:15 crc kubenswrapper[4612]: E1203 08:55:15.091431 4612 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d8td2_openshift-machine-config-operator(bf87dca8-ceaa-424a-8074-7a63c648b84b)\"" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" Dec 03 08:55:22 crc kubenswrapper[4612]: I1203 08:55:22.919775 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:22 crc kubenswrapper[4612]: E1203 08:55:22.920899 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d28757-ae06-4249-81ff-5e916d7d7062" containerName="container-00" Dec 03 08:55:22 crc kubenswrapper[4612]: I1203 08:55:22.920920 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d28757-ae06-4249-81ff-5e916d7d7062" containerName="container-00" Dec 03 08:55:22 crc kubenswrapper[4612]: I1203 
08:55:22.921240 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d28757-ae06-4249-81ff-5e916d7d7062" containerName="container-00" Dec 03 08:55:22 crc kubenswrapper[4612]: I1203 08:55:22.923299 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:22 crc kubenswrapper[4612]: I1203 08:55:22.940972 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.066991 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.067100 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdp7m\" (UniqueName: \"kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.067160 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.168597 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.168687 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdp7m\" (UniqueName: \"kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.168741 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.169176 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.169775 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content\") pod 
\"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.191364 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdp7m\" (UniqueName: \"kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m\") pod \"redhat-operators-jh8r6\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.242975 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:23 crc kubenswrapper[4612]: I1203 08:55:23.778159 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:24 crc kubenswrapper[4612]: I1203 08:55:24.669996 4612 generic.go:334] "Generic (PLEG): container finished" podID="713e5d03-d88c-4586-8505-700d72bc5c63" containerID="99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64" exitCode=0 Dec 03 08:55:24 crc kubenswrapper[4612]: I1203 08:55:24.670136 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerDied","Data":"99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64"} Dec 03 08:55:24 crc kubenswrapper[4612]: I1203 08:55:24.671773 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerStarted","Data":"1c66fdd2a6f068f5a68db1de1b672680f97d1ba6d9fdedb3ed26827a2edefdef"} Dec 03 08:55:24 crc kubenswrapper[4612]: I1203 08:55:24.672888 4612 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:55:25 crc kubenswrapper[4612]: I1203 08:55:25.682790 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerStarted","Data":"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e"} Dec 03 08:55:28 crc kubenswrapper[4612]: I1203 08:55:28.715691 4612 generic.go:334] "Generic (PLEG): container finished" podID="713e5d03-d88c-4586-8505-700d72bc5c63" containerID="a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e" exitCode=0 Dec 03 08:55:28 crc kubenswrapper[4612]: I1203 08:55:28.715989 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerDied","Data":"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e"} Dec 03 08:55:30 crc kubenswrapper[4612]: I1203 08:55:30.089836 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:55:31 crc kubenswrapper[4612]: I1203 08:55:31.741669 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"17e63b22170b5d8d9e033e05f57e06f019816355fe9033a9ed245f5e718c54b8"} Dec 03 08:55:31 crc kubenswrapper[4612]: I1203 08:55:31.758384 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" 
event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerStarted","Data":"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66"} Dec 03 08:55:31 crc kubenswrapper[4612]: I1203 08:55:31.835311 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jh8r6" podStartSLOduration=3.520529015 podStartE2EDuration="9.83528846s" podCreationTimestamp="2025-12-03 08:55:22 +0000 UTC" firstStartedPulling="2025-12-03 08:55:24.672650779 +0000 UTC m=+5287.846008179" lastFinishedPulling="2025-12-03 08:55:30.987410224 +0000 UTC m=+5294.160767624" observedRunningTime="2025-12-03 08:55:31.803907758 +0000 UTC m=+5294.977265158" watchObservedRunningTime="2025-12-03 08:55:31.83528846 +0000 UTC m=+5295.008645860" Dec 03 08:55:33 crc kubenswrapper[4612]: I1203 08:55:33.243421 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:33 crc kubenswrapper[4612]: I1203 08:55:33.244810 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:34 crc kubenswrapper[4612]: I1203 08:55:34.316118 4612 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jh8r6" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="registry-server" probeResult="failure" output=< Dec 03 08:55:34 crc kubenswrapper[4612]: timeout: failed to connect service ":50051" within 1s Dec 03 08:55:34 crc kubenswrapper[4612]: > Dec 03 08:55:43 crc kubenswrapper[4612]: I1203 08:55:43.311676 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:43 crc kubenswrapper[4612]: I1203 08:55:43.362531 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:43 crc kubenswrapper[4612]: I1203 08:55:43.548139 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:44 crc kubenswrapper[4612]: I1203 08:55:44.898121 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jh8r6" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="registry-server" containerID="cri-o://957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66" gracePeriod=2 Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.462280 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.552146 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content\") pod \"713e5d03-d88c-4586-8505-700d72bc5c63\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.552313 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdp7m\" (UniqueName: \"kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m\") pod \"713e5d03-d88c-4586-8505-700d72bc5c63\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.552347 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities\") pod \"713e5d03-d88c-4586-8505-700d72bc5c63\" (UID: \"713e5d03-d88c-4586-8505-700d72bc5c63\") " Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.555772 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities" (OuterVolumeSpecName: "utilities") pod "713e5d03-d88c-4586-8505-700d72bc5c63" (UID: "713e5d03-d88c-4586-8505-700d72bc5c63"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.560198 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m" (OuterVolumeSpecName: "kube-api-access-pdp7m") pod "713e5d03-d88c-4586-8505-700d72bc5c63" (UID: "713e5d03-d88c-4586-8505-700d72bc5c63"). InnerVolumeSpecName "kube-api-access-pdp7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.655587 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdp7m\" (UniqueName: \"kubernetes.io/projected/713e5d03-d88c-4586-8505-700d72bc5c63-kube-api-access-pdp7m\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.655842 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.673726 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "713e5d03-d88c-4586-8505-700d72bc5c63" (UID: "713e5d03-d88c-4586-8505-700d72bc5c63"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.757013 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/713e5d03-d88c-4586-8505-700d72bc5c63-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.912480 4612 generic.go:334] "Generic (PLEG): container finished" podID="713e5d03-d88c-4586-8505-700d72bc5c63" containerID="957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66" exitCode=0 Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.912540 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerDied","Data":"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66"} Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.912578 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jh8r6" event={"ID":"713e5d03-d88c-4586-8505-700d72bc5c63","Type":"ContainerDied","Data":"1c66fdd2a6f068f5a68db1de1b672680f97d1ba6d9fdedb3ed26827a2edefdef"} Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.912604 4612 scope.go:117] "RemoveContainer" containerID="957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.912779 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jh8r6" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.954129 4612 scope.go:117] "RemoveContainer" containerID="a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.975887 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.983251 4612 scope.go:117] "RemoveContainer" containerID="99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64" Dec 03 08:55:45 crc kubenswrapper[4612]: I1203 08:55:45.986019 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jh8r6"] Dec 03 08:55:46 crc kubenswrapper[4612]: I1203 08:55:46.020646 4612 scope.go:117] "RemoveContainer" containerID="957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66" Dec 03 08:55:46 crc kubenswrapper[4612]: E1203 08:55:46.021035 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66\": container with ID starting with 957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66 not found: ID does not exist" containerID="957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66" Dec 03 08:55:46 crc kubenswrapper[4612]: I1203 08:55:46.021075 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66"} err="failed to get container status \"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66\": rpc error: code = NotFound desc = could not find container \"957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66\": container with ID starting with 957e358a026740b343681669c25aea759bdff1cb26ee9464bbffdf0944c08b66 not found: ID does not exist" Dec 03 08:55:46 crc 
kubenswrapper[4612]: I1203 08:55:46.021099 4612 scope.go:117] "RemoveContainer" containerID="a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e" Dec 03 08:55:46 crc kubenswrapper[4612]: E1203 08:55:46.021358 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e\": container with ID starting with a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e not found: ID does not exist" containerID="a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e" Dec 03 08:55:46 crc kubenswrapper[4612]: I1203 08:55:46.021382 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e"} err="failed to get container status \"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e\": rpc error: code = NotFound desc = could not find container \"a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e\": container with ID starting with a3ce6aa809b1c816f0f3cdc87226b310efa2ed9f949cd0e69cacfc7ae7fe339e not found: ID does not exist" Dec 03 08:55:46 crc kubenswrapper[4612]: I1203 08:55:46.021400 4612 scope.go:117] "RemoveContainer" containerID="99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64" Dec 03 08:55:46 crc kubenswrapper[4612]: E1203 08:55:46.021636 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64\": container with ID starting with 99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64 not found: ID does not exist" containerID="99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64" Dec 03 08:55:46 crc kubenswrapper[4612]: I1203 08:55:46.021663 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64"} err="failed to get container status \"99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64\": rpc error: code = NotFound desc = could not find container \"99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64\": container with ID starting with 99d5c63d8031233d58d6c1c74b3d6e1700d07ad5a75d2260958691b878815b64 not found: ID does not exist" Dec 03 08:55:47 crc kubenswrapper[4612]: I1203 08:55:47.100246 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" path="/var/lib/kubelet/pods/713e5d03-d88c-4586-8505-700d72bc5c63/volumes" Dec 03 08:55:53 crc kubenswrapper[4612]: I1203 08:55:53.991813 4612 generic.go:334] "Generic (PLEG): container finished" podID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerID="48b0227fcb04bfd836e1ea29eebb878f99ca5fe8ecf73caa1184934e5009e6b3" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4612]: I1203 08:55:53.991889 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m28gf/must-gather-jlc2c" event={"ID":"5d795c66-eec8-4138-ba0a-1aaa62ce51d6","Type":"ContainerDied","Data":"48b0227fcb04bfd836e1ea29eebb878f99ca5fe8ecf73caa1184934e5009e6b3"} Dec 03 08:55:53 crc kubenswrapper[4612]: I1203 08:55:53.993052 4612 scope.go:117] "RemoveContainer" containerID="48b0227fcb04bfd836e1ea29eebb878f99ca5fe8ecf73caa1184934e5009e6b3" Dec 03 08:55:54 crc kubenswrapper[4612]: I1203 08:55:54.972321 4612 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-must-gather-m28gf_must-gather-jlc2c_5d795c66-eec8-4138-ba0a-1aaa62ce51d6/gather/0.log" Dec 03 08:56:08 crc kubenswrapper[4612]: I1203 08:56:08.956546 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m28gf/must-gather-jlc2c"] Dec 03 08:56:08 crc kubenswrapper[4612]: I1203 08:56:08.957361 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-m28gf/must-gather-jlc2c" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="copy" containerID="cri-o://2824a627fdf0b98e3c430f402d12388fb11d94c8d36f436747b9f890f9fea03b" gracePeriod=2 Dec 03 08:56:08 crc kubenswrapper[4612]: I1203 08:56:08.975294 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m28gf/must-gather-jlc2c"] Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.161428 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m28gf_must-gather-jlc2c_5d795c66-eec8-4138-ba0a-1aaa62ce51d6/copy/0.log" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.161878 4612 generic.go:334] "Generic (PLEG): container finished" podID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerID="2824a627fdf0b98e3c430f402d12388fb11d94c8d36f436747b9f890f9fea03b" exitCode=143 Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.440576 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m28gf_must-gather-jlc2c_5d795c66-eec8-4138-ba0a-1aaa62ce51d6/copy/0.log" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.441394 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/must-gather-jlc2c" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.544274 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output\") pod \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.544346 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j4r2\" (UniqueName: \"kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2\") pod \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\" (UID: \"5d795c66-eec8-4138-ba0a-1aaa62ce51d6\") " Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.550123 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2" (OuterVolumeSpecName: "kube-api-access-7j4r2") pod "5d795c66-eec8-4138-ba0a-1aaa62ce51d6" (UID: "5d795c66-eec8-4138-ba0a-1aaa62ce51d6"). InnerVolumeSpecName "kube-api-access-7j4r2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.646588 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j4r2\" (UniqueName: \"kubernetes.io/projected/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-kube-api-access-7j4r2\") on node \"crc\" DevicePath \"\"" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.749501 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5d795c66-eec8-4138-ba0a-1aaa62ce51d6" (UID: "5d795c66-eec8-4138-ba0a-1aaa62ce51d6"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:56:09 crc kubenswrapper[4612]: I1203 08:56:09.850429 4612 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5d795c66-eec8-4138-ba0a-1aaa62ce51d6-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 08:56:10 crc kubenswrapper[4612]: I1203 08:56:10.172835 4612 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m28gf_must-gather-jlc2c_5d795c66-eec8-4138-ba0a-1aaa62ce51d6/copy/0.log" Dec 03 08:56:10 crc kubenswrapper[4612]: I1203 08:56:10.173302 4612 scope.go:117] "RemoveContainer" containerID="2824a627fdf0b98e3c430f402d12388fb11d94c8d36f436747b9f890f9fea03b" Dec 03 08:56:10 crc kubenswrapper[4612]: I1203 08:56:10.173430 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m28gf/must-gather-jlc2c" Dec 03 08:56:10 crc kubenswrapper[4612]: I1203 08:56:10.205149 4612 scope.go:117] "RemoveContainer" containerID="48b0227fcb04bfd836e1ea29eebb878f99ca5fe8ecf73caa1184934e5009e6b3" Dec 03 08:56:11 crc kubenswrapper[4612]: I1203 08:56:11.102215 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" path="/var/lib/kubelet/pods/5d795c66-eec8-4138-ba0a-1aaa62ce51d6/volumes" Dec 03 08:57:47 crc kubenswrapper[4612]: I1203 08:57:47.136170 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:57:47 crc kubenswrapper[4612]: I1203 08:57:47.136616 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.145939 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:57:57 crc kubenswrapper[4612]: E1203 08:57:57.146864 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="gather" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.146880 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="gather" Dec 03 08:57:57 crc kubenswrapper[4612]: E1203 08:57:57.146898 4612 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="copy" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.146906 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="copy" Dec 03 08:57:57 crc kubenswrapper[4612]: E1203 08:57:57.146934 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="extract-utilities" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.146962 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="extract-utilities" Dec 03 08:57:57 crc kubenswrapper[4612]: E1203 08:57:57.146994 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="registry-server" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.147002 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="registry-server" Dec 03 08:57:57 crc kubenswrapper[4612]: E1203 08:57:57.147018 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="extract-content" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.147026 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="extract-content" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.147227 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="gather" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.147241 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="713e5d03-d88c-4586-8505-700d72bc5c63" containerName="registry-server" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.147261 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d795c66-eec8-4138-ba0a-1aaa62ce51d6" containerName="copy" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.149240 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.159356 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.323988 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wx9m\" (UniqueName: \"kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.324484 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.324539 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.426814 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.426872 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.426917 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wx9m\" (UniqueName: \"kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.427411 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.427640 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.456023 4612 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8wx9m\" (UniqueName: \"kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m\") pod \"redhat-marketplace-w5z8f\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:57 crc kubenswrapper[4612]: I1203 08:57:57.468775 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:57:58 crc kubenswrapper[4612]: I1203 08:57:58.071749 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:57:58 crc kubenswrapper[4612]: W1203 08:57:58.073355 4612 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddacc32d1_4333_415a_9318_59f979bcc4f7.slice/crio-22d40e6dedaacc4b3d7d112746bae77387390e23918e378f1b67335a0e6caa63 WatchSource:0}: Error finding container 22d40e6dedaacc4b3d7d112746bae77387390e23918e378f1b67335a0e6caa63: Status 404 returned error can't find the container with id 22d40e6dedaacc4b3d7d112746bae77387390e23918e378f1b67335a0e6caa63 Dec 03 08:57:58 crc kubenswrapper[4612]: I1203 08:57:58.224810 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerStarted","Data":"22d40e6dedaacc4b3d7d112746bae77387390e23918e378f1b67335a0e6caa63"} Dec 03 08:57:59 crc kubenswrapper[4612]: I1203 08:57:59.235235 4612 generic.go:334] "Generic (PLEG): container finished" podID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerID="3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2" exitCode=0 Dec 03 08:57:59 crc kubenswrapper[4612]: I1203 08:57:59.235342 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerDied","Data":"3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2"} Dec 03 08:58:01 crc kubenswrapper[4612]: I1203 08:58:01.258474 4612 generic.go:334] "Generic (PLEG): container finished" podID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerID="de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d" exitCode=0 Dec 03 08:58:01 crc kubenswrapper[4612]: I1203 08:58:01.258527 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerDied","Data":"de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d"} Dec 03 08:58:03 crc kubenswrapper[4612]: I1203 08:58:03.280775 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerStarted","Data":"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87"} Dec 03 08:58:03 crc kubenswrapper[4612]: I1203 08:58:03.313667 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w5z8f" podStartSLOduration=3.377679714 podStartE2EDuration="6.313626429s" podCreationTimestamp="2025-12-03 08:57:57 +0000 UTC" firstStartedPulling="2025-12-03 08:57:59.237591505 +0000 UTC m=+5442.410948915" lastFinishedPulling="2025-12-03 08:58:02.17353821 +0000 UTC m=+5445.346895630" observedRunningTime="2025-12-03 08:58:03.302042282 +0000 UTC m=+5446.475399682" 
watchObservedRunningTime="2025-12-03 08:58:03.313626429 +0000 UTC m=+5446.486983829" Dec 03 08:58:07 crc kubenswrapper[4612]: I1203 08:58:07.469640 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:07 crc kubenswrapper[4612]: I1203 08:58:07.470234 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:07 crc kubenswrapper[4612]: I1203 08:58:07.537706 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:08 crc kubenswrapper[4612]: I1203 08:58:08.390204 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:08 crc kubenswrapper[4612]: I1203 08:58:08.469579 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:58:10 crc kubenswrapper[4612]: I1203 08:58:10.342039 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w5z8f" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="registry-server" containerID="cri-o://b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87" gracePeriod=2 Dec 03 08:58:10 crc kubenswrapper[4612]: I1203 08:58:10.798667 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:10 crc kubenswrapper[4612]: I1203 08:58:10.999082 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities\") pod \"dacc32d1-4333-415a-9318-59f979bcc4f7\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " Dec 03 08:58:10 crc kubenswrapper[4612]: I1203 08:58:10.999717 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content\") pod \"dacc32d1-4333-415a-9318-59f979bcc4f7\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " Dec 03 08:58:10 crc kubenswrapper[4612]: I1203 08:58:10.999905 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wx9m\" (UniqueName: \"kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m\") pod \"dacc32d1-4333-415a-9318-59f979bcc4f7\" (UID: \"dacc32d1-4333-415a-9318-59f979bcc4f7\") " Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.000098 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities" (OuterVolumeSpecName: "utilities") pod "dacc32d1-4333-415a-9318-59f979bcc4f7" (UID: "dacc32d1-4333-415a-9318-59f979bcc4f7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.001210 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.006022 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m" (OuterVolumeSpecName: "kube-api-access-8wx9m") pod "dacc32d1-4333-415a-9318-59f979bcc4f7" (UID: "dacc32d1-4333-415a-9318-59f979bcc4f7"). InnerVolumeSpecName "kube-api-access-8wx9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.018175 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dacc32d1-4333-415a-9318-59f979bcc4f7" (UID: "dacc32d1-4333-415a-9318-59f979bcc4f7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.103490 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dacc32d1-4333-415a-9318-59f979bcc4f7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.103821 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wx9m\" (UniqueName: \"kubernetes.io/projected/dacc32d1-4333-415a-9318-59f979bcc4f7-kube-api-access-8wx9m\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.354991 4612 generic.go:334] "Generic (PLEG): container finished" podID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerID="b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87" exitCode=0 Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.355046 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerDied","Data":"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87"} Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.355122 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5z8f" event={"ID":"dacc32d1-4333-415a-9318-59f979bcc4f7","Type":"ContainerDied","Data":"22d40e6dedaacc4b3d7d112746bae77387390e23918e378f1b67335a0e6caa63"} Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.355127 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5z8f" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.355153 4612 scope.go:117] "RemoveContainer" containerID="b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.385776 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.387276 4612 scope.go:117] "RemoveContainer" containerID="de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.398555 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5z8f"] Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.417042 4612 scope.go:117] "RemoveContainer" containerID="3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.453182 4612 scope.go:117] "RemoveContainer" containerID="b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87" Dec 03 08:58:11 crc kubenswrapper[4612]: E1203 08:58:11.455641 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87\": container with ID starting with b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87 not found: ID does not exist" containerID="b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.455686 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87"} err="failed to get container status \"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87\": rpc error: code = NotFound desc = could not find container \"b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87\": container with ID starting with b431c18ce4be1e3949f6d51546e268d6120b361a66916ee355699f226318fd87 not found: ID does not exist" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.455712 4612 scope.go:117] "RemoveContainer" containerID="de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d" Dec 03 08:58:11 crc kubenswrapper[4612]: E1203 08:58:11.455964 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d\": container with ID starting with de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d not found: ID does not exist" containerID="de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.455994 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d"} err="failed to get container status \"de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d\": rpc error: code = NotFound desc = could not find container \"de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d\": container with ID starting with de0b1cbba5c2b2630a8b837feb5cac41ac936016fc8dee3bb6d05d59ad09cb4d not found: ID does not exist" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.456011 4612 scope.go:117] "RemoveContainer" 
containerID="3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2" Dec 03 08:58:11 crc kubenswrapper[4612]: E1203 08:58:11.456228 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2\": container with ID starting with 3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2 not found: ID does not exist" containerID="3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2" Dec 03 08:58:11 crc kubenswrapper[4612]: I1203 08:58:11.456257 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2"} err="failed to get container status \"3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2\": rpc error: code = NotFound desc = could not find container \"3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2\": container with ID starting with 3c137a8433cce089ede2b559e22e4fc1d6460db620b0e1308b463cbd781babc2 not found: ID does not exist" Dec 03 08:58:13 crc kubenswrapper[4612]: I1203 08:58:13.122243 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" path="/var/lib/kubelet/pods/dacc32d1-4333-415a-9318-59f979bcc4f7/volumes" Dec 03 08:58:17 crc kubenswrapper[4612]: I1203 08:58:17.135831 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:58:17 crc kubenswrapper[4612]: I1203 08:58:17.136354 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.746952 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:37 crc kubenswrapper[4612]: E1203 08:58:37.754826 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="extract-utilities" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.755041 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="extract-utilities" Dec 03 08:58:37 crc kubenswrapper[4612]: E1203 08:58:37.755163 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="extract-content" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.755267 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="extract-content" Dec 03 08:58:37 crc kubenswrapper[4612]: E1203 08:58:37.755361 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="registry-server" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.755432 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="registry-server" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 
08:58:37.755740 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="dacc32d1-4333-415a-9318-59f979bcc4f7" containerName="registry-server" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.757215 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.828258 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.830097 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.830156 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.830184 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58rj5\" (UniqueName: \"kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.931378 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58rj5\" (UniqueName: \"kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.931747 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.931780 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.932208 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.932563 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:37 crc kubenswrapper[4612]: I1203 08:58:37.976086 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58rj5\" (UniqueName: \"kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5\") pod \"certified-operators-6rbbp\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:38 crc kubenswrapper[4612]: I1203 08:58:38.081581 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:38 crc kubenswrapper[4612]: I1203 08:58:38.621360 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:39 crc kubenswrapper[4612]: I1203 08:58:39.635732 4612 generic.go:334] "Generic (PLEG): container finished" podID="87aefc8b-767f-4892-9acc-0257a5609633" containerID="c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b" exitCode=0 Dec 03 08:58:39 crc kubenswrapper[4612]: I1203 08:58:39.636267 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerDied","Data":"c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b"} Dec 03 08:58:39 crc kubenswrapper[4612]: I1203 08:58:39.636326 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerStarted","Data":"3dbab15d3360e4667c966383a0e4588896c0509ff170bd2ae6ed7436b9772bf3"} Dec 03 08:58:41 crc kubenswrapper[4612]: I1203 08:58:41.656197 4612 generic.go:334] "Generic (PLEG): container finished" podID="87aefc8b-767f-4892-9acc-0257a5609633" containerID="6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf" exitCode=0 Dec 03 08:58:41 crc kubenswrapper[4612]: I1203 08:58:41.656269 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerDied","Data":"6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf"} Dec 03 08:58:45 crc kubenswrapper[4612]: I1203 08:58:45.715147 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerStarted","Data":"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533"} Dec 03 08:58:45 crc kubenswrapper[4612]: I1203 08:58:45.742577 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6rbbp" podStartSLOduration=3.625487421 podStartE2EDuration="8.742558557s" podCreationTimestamp="2025-12-03 08:58:37 +0000 UTC" firstStartedPulling="2025-12-03 08:58:39.644261462 +0000 UTC m=+5482.817618902" lastFinishedPulling="2025-12-03 08:58:44.761332628 +0000 UTC m=+5487.934690038" observedRunningTime="2025-12-03 08:58:45.732746143 +0000 UTC m=+5488.906103543" watchObservedRunningTime="2025-12-03 08:58:45.742558557 +0000 UTC m=+5488.915915957" Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.135554 4612 patch_prober.go:28] interesting pod/machine-config-daemon-d8td2 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.135887 4612 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.135980 4612 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.136705 4612 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"17e63b22170b5d8d9e033e05f57e06f019816355fe9033a9ed245f5e718c54b8"} pod="openshift-machine-config-operator/machine-config-daemon-d8td2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.136774 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" podUID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerName="machine-config-daemon" containerID="cri-o://17e63b22170b5d8d9e033e05f57e06f019816355fe9033a9ed245f5e718c54b8" gracePeriod=600 Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.736344 4612 generic.go:334] "Generic (PLEG): container finished" podID="bf87dca8-ceaa-424a-8074-7a63c648b84b" containerID="17e63b22170b5d8d9e033e05f57e06f019816355fe9033a9ed245f5e718c54b8" exitCode=0 Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.736417 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerDied","Data":"17e63b22170b5d8d9e033e05f57e06f019816355fe9033a9ed245f5e718c54b8"} Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.736765 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d8td2" event={"ID":"bf87dca8-ceaa-424a-8074-7a63c648b84b","Type":"ContainerStarted","Data":"5460499ec8c069680166b0de86f677d42d7f38ea4867c355fa99fbf12c82bc74"} Dec 03 08:58:47 crc kubenswrapper[4612]: I1203 08:58:47.736797 4612 scope.go:117] "RemoveContainer" containerID="25bb15205783a6bcdbd8a8e3b7990c91040b92e44f809087bc87ad6abd349d5d" Dec 03 08:58:48 crc kubenswrapper[4612]: I1203 08:58:48.082791 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:48 crc kubenswrapper[4612]: I1203 08:58:48.083221 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:48 crc kubenswrapper[4612]: I1203 08:58:48.147680 4612 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:58 crc kubenswrapper[4612]: I1203 08:58:58.145404 4612 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:58 crc 
kubenswrapper[4612]: I1203 08:58:58.197409 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:58 crc kubenswrapper[4612]: I1203 08:58:58.835820 4612 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6rbbp" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="registry-server" containerID="cri-o://23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533" gracePeriod=2 Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.337337 4612 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.505801 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities\") pod \"87aefc8b-767f-4892-9acc-0257a5609633\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.506255 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content\") pod \"87aefc8b-767f-4892-9acc-0257a5609633\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.506336 4612 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58rj5\" (UniqueName: \"kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5\") pod \"87aefc8b-767f-4892-9acc-0257a5609633\" (UID: \"87aefc8b-767f-4892-9acc-0257a5609633\") " Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.506965 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities" (OuterVolumeSpecName: "utilities") pod "87aefc8b-767f-4892-9acc-0257a5609633" (UID: "87aefc8b-767f-4892-9acc-0257a5609633"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.514052 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5" (OuterVolumeSpecName: "kube-api-access-58rj5") pod "87aefc8b-767f-4892-9acc-0257a5609633" (UID: "87aefc8b-767f-4892-9acc-0257a5609633"). InnerVolumeSpecName "kube-api-access-58rj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.562842 4612 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87aefc8b-767f-4892-9acc-0257a5609633" (UID: "87aefc8b-767f-4892-9acc-0257a5609633"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.608263 4612 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.608294 4612 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87aefc8b-767f-4892-9acc-0257a5609633-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.608304 4612 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58rj5\" (UniqueName: \"kubernetes.io/projected/87aefc8b-767f-4892-9acc-0257a5609633-kube-api-access-58rj5\") on node \"crc\" DevicePath \"\"" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.851174 4612 generic.go:334] "Generic (PLEG): container finished" podID="87aefc8b-767f-4892-9acc-0257a5609633" containerID="23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533" exitCode=0 Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.851249 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerDied","Data":"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533"} Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.851296 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rbbp" event={"ID":"87aefc8b-767f-4892-9acc-0257a5609633","Type":"ContainerDied","Data":"3dbab15d3360e4667c966383a0e4588896c0509ff170bd2ae6ed7436b9772bf3"} Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.851330 4612 scope.go:117] "RemoveContainer" containerID="23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.851558 4612 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6rbbp" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.902743 4612 scope.go:117] "RemoveContainer" containerID="6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf" Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.920530 4612 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.930858 4612 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6rbbp"] Dec 03 08:58:59 crc kubenswrapper[4612]: I1203 08:58:59.936657 4612 scope.go:117] "RemoveContainer" containerID="c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:58:59.999981 4612 scope.go:117] "RemoveContainer" containerID="23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533" Dec 03 08:59:00 crc kubenswrapper[4612]: E1203 08:59:00.001364 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533\": container with ID starting with 23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533 not found: ID does not exist" containerID="23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:59:00.001406 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533"} err="failed to get container status \"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533\": rpc error: code = NotFound desc = could not find container \"23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533\": container with ID starting with 23a600ef1622bd9233657e2d50c14c1e91ff32fd4fc4d9611d42bf8f774c4533 not found: ID does not exist" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:59:00.001429 4612 scope.go:117] "RemoveContainer" containerID="6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf" Dec 03 08:59:00 crc kubenswrapper[4612]: E1203 08:59:00.001693 4612 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf\": container with ID starting with 6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf not found: ID does not exist" containerID="6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:59:00.001714 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf"} err="failed to get container status \"6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf\": rpc error: code = NotFound desc = could not find container \"6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf\": container with ID starting with 6fac63f082925bd002e1c140b9ff6b0534ffecfa08c2ddba4299a6aaaad29adf not found: ID does not exist" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:59:00.001730 4612 scope.go:117] "RemoveContainer" containerID="c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b" Dec 03 08:59:00 crc kubenswrapper[4612]: E1203 08:59:00.001954 4612 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b\": container with ID starting with c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b not found: ID does not exist" containerID="c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b" Dec 03 08:59:00 crc kubenswrapper[4612]: I1203 08:59:00.001974 4612 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b"} err="failed to get container status \"c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b\": rpc error: code = NotFound desc = could not find container \"c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b\": container with ID starting with c0344834875d0a1cc0dc46cfd0ce5bd71133d1bf07b9c0ee81e0304e905be91b not found: ID does not exist" Dec 03 08:59:01 crc kubenswrapper[4612]: I1203 08:59:01.102269 4612 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87aefc8b-767f-4892-9acc-0257a5609633" path="/var/lib/kubelet/pods/87aefc8b-767f-4892-9acc-0257a5609633/volumes" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.151494 4612 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w"] Dec 03 09:00:00 crc kubenswrapper[4612]: E1203 09:00:00.152706 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="extract-utilities" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.152726 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="extract-utilities" Dec 03 09:00:00 crc kubenswrapper[4612]: E1203 09:00:00.152749 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="registry-server" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.152759 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="registry-server" Dec 03 09:00:00 crc kubenswrapper[4612]: E1203 09:00:00.152788 4612 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="extract-content" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.152799 4612 state_mem.go:107] "Deleted CPUSet assignment" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="extract-content" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.153210 4612 memory_manager.go:354] "RemoveStaleState removing state" podUID="87aefc8b-767f-4892-9acc-0257a5609633" containerName="registry-server" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.154101 4612 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.156356 4612 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.161066 4612 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.162451 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w"] Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.220756 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrbj7\" (UniqueName: \"kubernetes.io/projected/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-kube-api-access-zrbj7\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.221108 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-config-volume\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.221213 4612 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-secret-volume\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.323107 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-config-volume\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.323158 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-secret-volume\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.323280 4612 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrbj7\" (UniqueName: \"kubernetes.io/projected/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-kube-api-access-zrbj7\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.324061 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-config-volume\") pod 
\"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.331308 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-secret-volume\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.341866 4612 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrbj7\" (UniqueName: \"kubernetes.io/projected/3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a-kube-api-access-zrbj7\") pod \"collect-profiles-29412540-99g6w\" (UID: \"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.485418 4612 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" Dec 03 09:00:00 crc kubenswrapper[4612]: I1203 09:00:00.966088 4612 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w"] Dec 03 09:00:01 crc kubenswrapper[4612]: I1203 09:00:01.500825 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" event={"ID":"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a","Type":"ContainerStarted","Data":"a252cb53e290b41029b8f79547e7dbcf847f0d5ef82b06f50a7b55b31dcd2856"} Dec 03 09:00:01 crc kubenswrapper[4612]: I1203 09:00:01.501244 4612 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" event={"ID":"3ca3d2e4-ff13-40c4-8e59-e6ba8f9d684a","Type":"ContainerStarted","Data":"bbc011b9de7c61fd6ff4c352075cce060cac54ee85b82c6f910c223622dbe019"} Dec 03 09:00:01 crc kubenswrapper[4612]: I1203 09:00:01.523660 4612 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-99g6w" podStartSLOduration=1.523637517 podStartE2EDuration="1.523637517s" podCreationTimestamp="2025-12-03 09:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:00:01.5169242 +0000 UTC m=+5564.690281620" watchObservedRunningTime="2025-12-03 09:00:01.523637517 +0000 UTC m=+5564.696994927" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113776027024456 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113776030017365 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113762521016510 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113762521015460 5ustar corecore